diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c968762..deea83c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,5 +24,5 @@ jobs:
uses: ./
with:
name: 'JEST Tests'
- path: '__tests__/__results__/jest-junit.xml'
+ path: '__tests__/__results__/*.xml'
reporter: 'jest-junit'
diff --git a/.gitignore b/.gitignore
index 4f3bf2a..639696a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,5 +99,4 @@ __tests__/runner/*
lib/**/*

# Project specific
-__tests__/__outputs__
__tests__/__results__
diff --git a/__tests__/__outputs__/dart-json.md b/__tests__/__outputs__/dart-json.md
new file mode 100644
index 0000000..26a0df7
--- /dev/null
+++ b/__tests__/__outputs__/dart-json.md
@@ -0,0 +1,38 @@
+### fixtures/dart-json.json
+
+**6** tests were completed in **3.760s** with **1** passed, **1** skipped and **4** failed.
+
+| Result | Suite | Tests | Time | Passed ✔️ | Skipped ✖️ | Failed ❌ |
+| :---: | :--- | ---: | ---: | ---: | ---: | ---: |
+| ❌ | [test\main_test.dart](#ts-0-test-maintest-dart) | 4 | 74ms | 1 | 0 | 3 |
+| ❌ | [test\second_test.dart](#ts-1-test-secondtest-dart) | 2 | 51ms | 0 | 1 | 1 |
+
+# Test Suites
+
+## test\main_test.dart ❌
+
+### Test 1
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ✔️ | Test 1 Passing test | 36ms |
+
+### Test 1 Test 1.1
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Test 1 Test 1.1 Failing test | 20ms |
+| ❌ | Test 1 Test 1.1 Exception in target unit | 6ms |
+
+### Test 2
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Test 2 Exception in test | 12ms |
+
+## test\second_test.dart ❌
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Timeout test | 37ms |
+| ✖️ | Skipped test | 14ms |
diff --git a/__tests__/__outputs__/dotnet-trx.md b/__tests__/__outputs__/dotnet-trx.md
new file mode 100644
index 0000000..7a305bf
--- /dev/null
+++ b/__tests__/__outputs__/dotnet-trx.md
@@ -0,0 +1,21 @@
+### fixtures/dotnet-trx.trx
+
+**7** tests were completed in **1.061s** with **3** passed, **1** skipped and **3** failed.
+
+| Result | Suite | Tests | Time | Passed ✔️ | Skipped ✖️ | Failed ❌ |
+| :---: | :--- | ---: | ---: | ---: | ---: | ---: |
+| ❌ | [DotnetTests.XUnitTests.CalculatorTests](#ts-0-DotnetTests-XUnitTests-CalculatorTests) | 7 | 109.5761ms | 3 | 1 | 3 |
+
+# Test Suites
+
+## DotnetTests.XUnitTests.CalculatorTests ❌
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Exception_In_TargetTest | 0.4975ms |
+| ❌ | Exception_In_Test | 2.2728ms |
+| ❌ | Failing_Test | 3.2953ms |
+| ✔️ | Passing_Test | 0.1254ms |
+| ✔️ | Passing_Test_With_Name | 0.103ms |
+| ✖️ | Skipped_Test | 1ms |
+| ✔️ | Timeout_Test | 102.2821ms |
diff --git a/__tests__/__outputs__/jest-junit.md b/__tests__/__outputs__/jest-junit.md
new file mode 100644
index 0000000..95fef88
--- /dev/null
+++ b/__tests__/__outputs__/jest-junit.md
@@ -0,0 +1,38 @@
+### fixtures/jest-junit.xml
+
+**6** tests were completed in **1.360s** with **1** passed, **1** skipped and **4** failed.
+
+| Result | Suite | Tests | Time | Passed ✔️ | Skipped ✖️ | Failed ❌ |
+| :---: | :--- | ---: | ---: | ---: | ---: | ---: |
+| ❌ | [__tests__\main.test.js](#ts-0-tests-main-test-js) | 4 | 486ms | 1 | 0 | 3 |
+| ❌ | [__tests__\second.test.js](#ts-1-tests-second-test-js) | 2 | 82ms | 0 | 1 | 1 |
+
+# Test Suites
+
+## __tests__\main.test.js ❌
+
+### Test 1
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ✔️ | Passing test | 1ms |
+
+### Test 1 › Test 1.1
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Failing test | 2ms |
+| ❌ | Exception in target unit | 0ms |
+
+### Test 2
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Exception in test | 0ms |
+
+## __tests__\second.test.js ❌
+
+| Result | Test | Time |
+| :---: | :--- | ---: |
+| ❌ | Timeout test | 4ms |
+| ✖️ | Skipped test | 0ms |
diff --git a/__tests__/__snapshots__/dart-json.test.ts.snap b/__tests__/__snapshots__/dart-json.test.ts.snap
index 194aadd..bddc0c1 100644
--- a/__tests__/__snapshots__/dart-json.test.ts.snap
+++ b/__tests__/__snapshots__/dart-json.test.ts.snap
@@ -52,11 +52,15 @@ dart:isolate _RawReceivePortImpl._handleMessage
"title": "[test\\\\second_test.dart] Timeout test",
},
],
- "summary": "**6** tests were completed in **3.760s** with **1** passed, **1** skipped and **4** failed.
-| Result | Suite | Tests | Time | Passed ✔️ | Failed ❌ | Skipped ✖️ |
+ "summary": "### fixtures/dart-json.json
+
+**6** tests were completed in **3.760s** with **1** passed, **1** skipped and **4** failed.
+
+| Result | Suite | Tests | Time | Passed ✔️ | Skipped ✖️ | Failed ❌ |
| :---: | :--- | ---: | ---: | ---: | ---: | ---: |
-| ❌ | [test\\\\main_test.dart](#ts-0-test-maintest-dart) | 4 | 74ms | 1 | 3 | 0 |
+| ❌ | [test\\\\main_test.dart](#ts-0-test-maintest-dart) | 4 | 74ms | 1 | 0 | 3 |
| ❌ | [test\\\\second_test.dart](#ts-1-test-secondtest-dart) | 2 | 51ms | 0 | 1 | 1 |
+
# Test Suites

## test\\\\main_test.dart ❌
diff --git a/__tests__/__snapshots__/dotnet-trx.test.ts.snap b/__tests__/__snapshots__/dotnet-trx.test.ts.snap
index 2677a7c..2440001 100644
--- a/__tests__/__snapshots__/dotnet-trx.test.ts.snap
+++ b/__tests__/__snapshots__/dotnet-trx.test.ts.snap
@@ -30,10 +30,14 @@ Actual: 2",
"title": "[DotnetTests.XUnitTests.CalculatorTests] Failing_Test",
},
],
- "summary": "**7** tests were completed in **1.061s** with **3** passed, **1** skipped and **3** failed.
-| Result | Suite | Tests | Time | Passed ✔️ | Failed ❌ | Skipped ✖️ |
+ "summary": "### fixtures/dotnet-trx.trx
+
+**7** tests were completed in **1.061s** with **3** passed, **1** skipped and **3** failed.
+
+| Result | Suite | Tests | Time | Passed ✔️ | Skipped ✖️ | Failed ❌ |
| :---: | :--- | ---: | ---: | ---: | ---: | ---: |
-| ❌ | [DotnetTests.XUnitTests.CalculatorTests](#ts-0-DotnetTests-XUnitTests-CalculatorTests) | 7 | 109.5761ms | 3 | 3 | 1 |
+| ❌ | [DotnetTests.XUnitTests.CalculatorTests](#ts-0-DotnetTests-XUnitTests-CalculatorTests) | 7 | 109.5761ms | 3 | 1 | 3 |
+
# Test Suites

## DotnetTests.XUnitTests.CalculatorTests ❌
diff --git a/__tests__/__snapshots__/jest-junit.test.ts.snap b/__tests__/__snapshots__/jest-junit.test.ts.snap
index bf8ec9b..3936f88 100644
--- a/__tests__/__snapshots__/jest-junit.test.ts.snap
+++ b/__tests__/__snapshots__/jest-junit.test.ts.snap
@@ -73,11 +73,15 @@ Received: false
"title": "[__tests__\\\\second.test.js] Timeout test",
},
],
- "summary": "**6** tests were completed in **1.360s** with **1** passed, **1** skipped and **4** failed.
-| Result | Suite | Tests | Time | Passed ✔️ | Failed ❌ | Skipped ✖️ |
+ "summary": "### fixtures/jest-junit.xml
+
+**6** tests were completed in **1.360s** with **1** passed, **1** skipped and **4** failed.
+
+| Result | Suite | Tests | Time | Passed ✔️ | Skipped ✖️ | Failed ❌ |
| :---: | :--- | ---: | ---: | ---: | ---: | ---: |
-| ❌ | [__tests__\\\\main.test.js](#ts-0-tests-main-test-js) | 4 | 486ms | 1 | 3 | 0 |
+| ❌ | [__tests__\\\\main.test.js](#ts-0-tests-main-test-js) | 4 | 486ms | 1 | 0 | 3 |
| ❌ | [__tests__\\\\second.test.js](#ts-1-tests-second-test-js) | 2 | 82ms | 0 | 1 | 1 |
+
# Test Suites

## __tests__\\\\main.test.js ❌
diff --git a/__tests__/dart-json.test.ts b/__tests__/dart-json.test.ts
index 99bd7c5..09da075 100644
--- a/__tests__/dart-json.test.ts
+++ b/__tests__/dart-json.test.ts
@@ -3,9 +3,14 @@ import * as path from 'path'
import {parseDartJson} from '../src/parsers/dart-json/dart-json-parser'
import {ParseOptions} from '../src/parsers/parser-types'
+import {normalizeFilePath} from '../src/utils/file-utils'

-const xmlFixture = fs.readFileSync(path.join(__dirname, 'fixtures', 'dart-json.json'), {encoding: 'utf8'})
-const outputPath = __dirname + '/__outputs__/dart-json.md'
+const fixturePath = path.join(__dirname, 'fixtures', 'dart-json.json')
+const outputPath = path.join(__dirname, '__outputs__', 'dart-json.md')
+const xmlFixture = {
+ path: normalizeFilePath(path.relative(__dirname, fixturePath)),
+ content: fs.readFileSync(fixturePath, {encoding: 'utf8'})
+}

describe('dart-json tests', () => {
it('matches report snapshot', async () => {
@@ -16,7 +21,7 @@ describe('dart-json tests', () => {
workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dart/'
}

- const result = await parseDartJson(xmlFixture, opts)
+ const result = await parseDartJson([xmlFixture], opts)

fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, result?.output?.summary ?? '')
diff --git a/__tests__/dotnet-trx.test.ts b/__tests__/dotnet-trx.test.ts
index 6e1054b..b18075c 100644
--- a/__tests__/dotnet-trx.test.ts
+++ b/__tests__/dotnet-trx.test.ts
@@ -3,9 +3,14 @@ import * as path from 'path'
import {parseDotnetTrx} from '../src/parsers/dotnet-trx/dotnet-trx-parser'
import {ParseOptions} from '../src/parsers/parser-types'
+import {normalizeFilePath} from '../src/utils/file-utils'

-const xmlFixture = fs.readFileSync(path.join(__dirname, 'fixtures', 'dotnet-trx.trx'), {encoding: 'utf8'})
-const outputPath = __dirname + '/__outputs__/dotnet-trx.md'
+const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
+const outputPath = path.join(__dirname, '__outputs__', 'dotnet-trx.md')
+const xmlFixture = {
+ path: normalizeFilePath(path.relative(__dirname, fixturePath)),
+ content: fs.readFileSync(fixturePath, {encoding: 'utf8'})
+}

describe('dotnet-trx tests', () => {
it('matches report snapshot', async () => {
@@ -16,7 +21,7 @@ describe('dotnet-trx tests', () => {
workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dotnet/'
}

- const result = await parseDotnetTrx(xmlFixture, opts)
+ const result = await parseDotnetTrx([xmlFixture], opts)

fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, result?.output?.summary ?? '')
diff --git a/__tests__/jest-junit.test.ts b/__tests__/jest-junit.test.ts
index 6ae22f1..efbc01c 100644
--- a/__tests__/jest-junit.test.ts
+++ b/__tests__/jest-junit.test.ts
@@ -3,9 +3,14 @@ import * as path from 'path'
import {parseJestJunit} from '../src/parsers/jest-junit/jest-junit-parser'
import {ParseOptions} from '../src/parsers/parser-types'
+import {normalizeFilePath} from '../src/utils/file-utils'

-const xmlFixture = fs.readFileSync(path.join(__dirname, 'fixtures', 'jest-junit.xml'), {encoding: 'utf8'})
-const outputPath = __dirname + '/__outputs__/jest-junit.md'
+const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
+const outputPath = path.join(__dirname, '__outputs__', 'jest-junit.md')
+const xmlFixture = {
+ path: normalizeFilePath(path.relative(__dirname, fixturePath)),
+ content: fs.readFileSync(fixturePath, {encoding: 'utf8'})
+}

describe('jest-junit tests', () => {
it('matches report snapshot', async () => {
@@ -16,7 +21,7 @@ describe('jest-junit tests', () => {
workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/jest/'
}

- const result = await parseJestJunit(xmlFixture, opts)
+ const result = await parseJestJunit([xmlFixture], opts)

fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, result?.output?.summary ?? '')
diff --git a/dist/index.js b/dist/index.js
index 9edcf20..4360200 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -26,9 +26,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getFiles = void 0;
const core = __importStar(__webpack_require__(2186));
const github = __importStar(__webpack_require__(5438));
+const fs = __importStar(__webpack_require__(5747));
+const fast_glob_1 = __importDefault(__webpack_require__(3664));
const dart_json_parser_1 = __webpack_require__(4528);
const dotnet_trx_parser_1 = __webpack_require__(2664);
const jest_junit_parser_1 = __webpack_require__(1113);
@@ -52,6 +58,7 @@ async function main() {
const token = core.getInput('token', { required: true });
const workDirInput = core.getInput('working-directory', { required: false });
if (workDirInput) {
+ core.info(`Changing directory to ${workDirInput}`);
process.chdir(workDirInput);
}
const workDir = file_utils_1.normalizeDirPath(process.cwd(), true);
@@ -66,8 +73,12 @@ async function main() {
workDir
};
const parser = getParser(reporter);
- const content = file_utils_1.getFileContent(path);
- const result = await parser(content, opts);
+ const files = await getFiles(path);
+ if (files.length === 0) {
+ core.setFailed(`No file matches path ${path}`);
+ return;
+ }
+ const result = await parser(files, opts);
const conclusion = result.success ? 'success' : 'failure';
await octokit.checks.create({
head_sha: sha,
@@ -96,6 +107,14 @@ function getParser(reporter) {
throw new Error(`Input parameter 'reporter' is set to invalid value '${reporter}'`);
}
}
+async function getFiles(pattern) {
+ const paths = await fast_glob_1.default(pattern, { dot: true });
+ return Promise.all(paths.map(async (path) => {
+ const content = await fs.promises.readFile(path, { encoding: 'utf8' });
+ return { path, content };
+ }));
+}
+exports.getFiles = getFiles;
run();
@@ -117,7 +136,8 @@ const markdown_utils_1 = __webpack_require__(6482);
const dart_json_types_1 = __webpack_require__(7887);
const test_results_1 = __webpack_require__(8407);
class TestRun {
- constructor(suites, success, time) {
+ constructor(path, suites, success, time) {
+ this.path = path;
this.suites = suites;
this.success = success;
this.time = time;
@@ -157,20 +177,22 @@ class TestCase {
return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0;
}
}
-async function parseDartJson(content, options) {
- const testRun = getTestRun(content);
- const icon = testRun.success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;
+async function parseDartJson(files, options) {
+ const testRuns = files.map(f => getTestRun(f.path, f.content));
+ const testRunsResults = testRuns.map(getTestRunResult);
+ const success = testRuns.every(tr => tr.success);
+ const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;
return {
- success: testRun.success,
+ success,
output: {
title: `${options.name.trim()} ${icon}`,
- summary: get_report_1.default(getTestRunResult(testRun)),
- annotations: options.annotations ? getAnnotations(testRun, options.workDir, options.trackedFiles) : undefined
+ summary: get_report_1.default(testRunsResults),
+ annotations: options.annotations ? getAnnotations(testRuns, options.workDir, options.trackedFiles) : undefined
}
};
}
exports.parseDartJson = parseDartJson;
-function getTestRun(content) {
+function getTestRun(path, content) {
const lines = content.split(/\n\r?/g).filter(line => line !== '');
const events = lines.map(str => JSON.parse(str));
let success = false;
@@ -202,13 +224,13 @@ function getTestRun(content) {
totalTime = evt.time;
}
}
- return new TestRun(Object.values(suites), success, totalTime);
+ return new TestRun(path, Object.values(suites), success, totalTime);
}
function getTestRunResult(tr) {
const suites = tr.suites.map(s => {
return new test_results_1.TestSuiteResult(s.suite.path, getGroups(s));
});
- return new test_results_1.TestRunResult(suites, tr.time);
+ return new test_results_1.TestRunResult(tr.path, suites, tr.time);
}
function getGroups(suite) {
const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0);
@@ -219,15 +241,17 @@ function getGroups(suite) {
return new test_results_1.TestGroupResult(group.group.name, tests);
});
}
-function getAnnotations(tr, workDir, trackedFiles) {
+function getAnnotations(testRuns, workDir, trackedFiles) {
const annotations = [];
- for (const suite of tr.suites) {
- for (const group of Object.values(suite.groups)) {
- for (const test of group.tests) {
- if (test.error) {
- const err = getAnnotation(test, suite, workDir, trackedFiles);
- if (err !== null) {
- annotations.push(err);
+ for (const tr of testRuns) {
+ for (const suite of tr.suites) {
+ for (const group of Object.values(suite.groups)) {
+ for (const test of group.tests) {
+ if (test.error) {
+ const err = getAnnotation(test, suite, workDir, trackedFiles);
+ if (err !== null) {
+ annotations.push(err);
+ }
}
}
}
@@ -367,25 +391,34 @@ class Test {
}
}
}
-async function parseDotnetTrx(content, options) {
- const trx = (await xml2js_1.parseStringPromise(content, {
- attrValueProcessors: [xml_utils_1.parseAttribute]
- }));
- const testClasses = getTestClasses(trx);
- const testRun = getTestRunResult(trx, testClasses);
- const success = testRun.result === 'success';
+async function parseDotnetTrx(files, options) {
+ const testRuns = [];
+ const testClasses = [];
+ for (const file of files) {
+ const trx = await getTrxReport(file.content);
+ const tc = getTestClasses(trx);
+ const tr = getTestRunResult(file.path, trx, tc);
+ testRuns.push(tr);
+ testClasses.push(...tc);
+ }
+ const success = testRuns.every(tr => tr.result === 'success');
const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;
return {
success,
output: {
title: `${options.name.trim()} ${icon}`,
- summary: get_report_1.default(testRun),
+ summary: get_report_1.default(testRuns),
annotations: options.annotations ? getAnnotations(testClasses, options.workDir, options.trackedFiles) : undefined
}
};
}
exports.parseDotnetTrx = parseDotnetTrx;
-function getTestRunResult(trx, testClasses) {
+async function getTrxReport(content) {
+ return (await xml2js_1.parseStringPromise(content, {
+ attrValueProcessors: [xml_utils_1.parseAttribute]
+ }));
+}
+function getTestRunResult(path, trx, testClasses) {
const times = trx.TestRun.Times[0].$;
const totalTime = times.finish.getTime() - times.start.getTime();
const suites = testClasses.map(tc => {
@@ -393,7 +426,7 @@ function getTestRunResult(trx, testClasses) {
const group = new test_results_1.TestGroupResult(null, tests);
return new test_results_1.TestSuiteResult(tc.name, [group]);
});
- return new test_results_1.TestRunResult(suites, totalTime);
+ return new test_results_1.TestRunResult(path, suites, totalTime);
}
function getTestClasses(trx) {
var _a;
@@ -486,25 +519,33 @@ const file_utils_1 = __webpack_require__(2711);
const xml_utils_1 = __webpack_require__(8653);
const test_results_1 = __webpack_require__(8407);
const get_report_1 = __importDefault(__webpack_require__(3737));
-async function parseJestJunit(content, options) {
- var _a, _b;
- const junit = (await xml2js_1.parseStringPromise(content, {
- attrValueProcessors: [xml_utils_1.parseAttribute]
- }));
- const testsuites = junit.testsuites;
- const success = !(((_a = testsuites.$) === null || _a === void 0 ? void 0 : _a.failures) > 0 || ((_b = testsuites.$) === null || _b === void 0 ? void 0 : _b.errors) > 0);
+async function parseJestJunit(files, options) {
+ const junit = [];
+ const testRuns = [];
+ for (const file of files) {
+ const ju = await getJunitReport(file.content);
+ const tr = getTestRunResult(file.path, ju);
+ junit.push(ju);
+ testRuns.push(tr);
+ }
+ const success = testRuns.every(tr => tr.result === 'success');
const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;
return {
success,
output: {
title: `${options.name.trim()} ${icon}`,
- summary: getSummary(junit),
+ summary: get_report_1.default(testRuns),
annotations: options.annotations ? getAnnotations(junit, options.workDir, options.trackedFiles) : undefined
}
};
}
exports.parseJestJunit = parseJestJunit;
-function getSummary(junit) {
+async function getJunitReport(content) {
+ return (await xml2js_1.parseStringPromise(content, {
+ attrValueProcessors: [xml_utils_1.parseAttribute]
+ }));
+}
+function getTestRunResult(path, junit) {
const suites = junit.testsuites.testsuite.map(ts => {
const name = ts.$.name.trim();
const time = ts.$.time * 1000;
@@ -512,8 +553,7 @@ function getSummary(junit) {
return sr;
});
const time = junit.testsuites.$.time * 1000;
- const tr = new test_results_1.TestRunResult(suites, time);
- return get_report_1.default(tr);
+ return new test_results_1.TestRunResult(path, suites, time);
}
function getGroups(suite) {
const groups = [];
@@ -542,26 +582,28 @@ function getTestCaseResult(test) {
return 'skipped';
return 'success';
}
-function getAnnotations(junit, workDir, trackedFiles) {
+function getAnnotations(junitReports, workDir, trackedFiles) {
const annotations = [];
- for (const suite of junit.testsuites.testsuite) {
- for (const tc of suite.testcase) {
- if (!tc.failure) {
- continue;
- }
- for (const ex of tc.failure) {
- const src = exceptionThrowSource(ex, workDir, trackedFiles);
- if (src === null) {
+ for (const junit of junitReports) {
+ for (const suite of junit.testsuites.testsuite) {
+ for (const tc of suite.testcase) {
+ if (!tc.failure) {
continue;
}
- annotations.push({
- annotation_level: 'failure',
- start_line: src.line,
- end_line: src.line,
- path: src.file,
- message: markdown_utils_1.fixEol(ex),
- title: `[${suite.$.name}] ${tc.$.name.trim()}`
- });
+ for (const ex of tc.failure) {
+ const src = exceptionThrowSource(ex, workDir, trackedFiles);
+ if (src === null) {
+ continue;
+ }
+ annotations.push({
+ annotation_level: 'failure',
+ start_line: src.line,
+ end_line: src.line,
+ path: src.file,
+ message: markdown_utils_1.fixEol(ex),
+ title: `[${suite.$.name}] ${tc.$.name.trim()}`
+ });
+ }
}
}
}
@@ -598,23 +640,31 @@ exports.exceptionThrowSource = exceptionThrowSource;
Object.defineProperty(exports, "__esModule", ({ value: true }));
const markdown_utils_1 = __webpack_require__(6482);
const slugger_1 = __webpack_require__(3328);
-function getReport(tr) {
+function getReport(results) {
+ const runsSummary = results.map(getRunSummary).join('\n\n');
+ const suites = results
+ .flatMap(tr => tr.suites)
+ .map((ts, i) => getSuiteSummary(ts, i))
+ .join('\n');
+ const suitesSection = `# Test Suites\n\n${suites}`;
+ return [runsSummary, suitesSection].join('\n\n');
+}
+exports.default = getReport;
+function getRunSummary(tr) {
const time = `${(tr.time / 1000).toFixed(3)}s`;
- const headingLine = `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.skipped}** skipped and **${tr.failed}** failed.`;
+ const headingLine1 = `### ${tr.path}`;
+ const headingLine2 = `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.skipped}** skipped and **${tr.failed}** failed.`;
const suitesSummary = tr.suites.map((s, i) => {
const icon = getResultIcon(s.result);
const tsTime = `${s.time}ms`;
const tsName = s.name;
const tsAddr = makeSuiteSlug(i, tsName).link;
const tsNameLink = markdown_utils_1.link(tsName, tsAddr);
- return [icon, tsNameLink, s.tests, tsTime, s.passed, s.failed, s.skipped];
+ return [icon, tsNameLink, s.tests, tsTime, s.passed, s.skipped, s.failed];
});
- const summary = markdown_utils_1.table(['Result', 'Suite', 'Tests', 'Time', `Passed ${markdown_utils_1.Icon.success}`, `Failed ${markdown_utils_1.Icon.fail}`, `Skipped ${markdown_utils_1.Icon.skip}`], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suitesSummary);
- const suites = tr.suites.map((ts, i) => getSuiteSummary(ts, i)).join('\n');
- const suitesSection = `# Test Suites\n\n${suites}`;
- return `${headingLine}\n${summary}\n${suitesSection}`;
+ const summary = markdown_utils_1.table(['Result', 'Suite', 'Tests', 'Time', `Passed ${markdown_utils_1.Icon.success}`, `Skipped ${markdown_utils_1.Icon.skip}`, `Failed ${markdown_utils_1.Icon.fail}`], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suitesSummary);
+ return [headingLine1, headingLine2, summary].join('\n\n');
}
-exports.default = getReport;
function getSuiteSummary(ts, index) {
const icon = getResultIcon(ts.result);
const content = ts.groups
@@ -662,7 +712,8 @@ function getResultIcon(result) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0;
class TestRunResult {
- constructor(suites, totalTime) {
+ constructor(path, suites, totalTime) {
+ this.path = path;
this.suites = suites;
this.totalTime = totalTime;
}
@@ -774,48 +825,18 @@ exports.default = exec;
/***/ }),
/***/ 2711:
-/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
+/***/ ((__unused_webpack_module, exports) => {
"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.normalizeFilePath = exports.normalizeDirPath = exports.getFileContent = void 0;
-const fs = __importStar(__webpack_require__(5747));
-function getFileContent(path) {
- if (!fs.existsSync(path)) {
- throw new Error(`File '${path}' not found`);
- }
- if (!fs.lstatSync(path).isFile()) {
- throw new Error(`'${path}' is not a file`);
- }
- return fs.readFileSync(path, { encoding: 'utf8' });
-}
-exports.getFileContent = getFileContent;
-function normalizeDirPath(path, trailingSeparator) {
+exports.normalizeFilePath = exports.normalizeDirPath = void 0;
+function normalizeDirPath(path, addTrailingSlash) {
if (!path) {
return path;
}
path = normalizeFilePath(path);
- if (trailingSeparator && !path.endsWith('/')) {
+ if (addTrailingSlash && !path.endsWith('/')) {
path += '/';
}
return path;
@@ -3399,1474 +3420,2321 @@ function copyFile(srcFile, destFile, force) {
/***/ }),
-/***/ 334:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 3803:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
-
Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-async function auth(token) {
- const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth";
- return {
- type: "token",
- token: token,
- tokenType
- };
+const fs = __webpack_require__(5747);
+exports.FILE_SYSTEM_ADAPTER = {
+ lstat: fs.lstat,
+ stat: fs.stat,
+ lstatSync: fs.lstatSync,
+ statSync: fs.statSync,
+ readdir: fs.readdir,
+ readdirSync: fs.readdirSync
+};
+function createFileSystemAdapter(fsMethods) {
+ if (fsMethods === undefined) {
+ return exports.FILE_SYSTEM_ADAPTER;
+ }
+ return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
+exports.createFileSystemAdapter = createFileSystemAdapter;
+
+/***/ }),
+
+/***/ 8838:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
+const MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
+const MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
+const SUPPORTED_MAJOR_VERSION = 10;
+const SUPPORTED_MINOR_VERSION = 10;
+const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
+const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
/**
- * Prefix token for usage in the Authorization header
- *
- * @param token OAuth token or JSON Web Token
+ * IS `true` for Node.js 10.10 and greater.
*/
-function withAuthorizationPrefix(token) {
- if (token.split(/\./).length === 3) {
- return `bearer ${token}`;
- }
+exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
- return `token ${token}`;
-}
-async function hook(token, request, route, parameters) {
- const endpoint = request.endpoint.merge(route, parameters);
- endpoint.headers.authorization = withAuthorizationPrefix(token);
- return request(endpoint);
+/***/ }),
+
+/***/ 5667:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const async = __webpack_require__(4507);
+const sync = __webpack_require__(9560);
+const settings_1 = __webpack_require__(8662);
+exports.Settings = settings_1.default;
+function scandir(path, optionsOrSettingsOrCallback, callback) {
+ if (typeof optionsOrSettingsOrCallback === 'function') {
+ return async.read(path, getSettings(), optionsOrSettingsOrCallback);
+ }
+ async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
+}
+exports.scandir = scandir;
+function scandirSync(path, optionsOrSettings) {
+ const settings = getSettings(optionsOrSettings);
+ return sync.read(path, settings);
+}
+exports.scandirSync = scandirSync;
+function getSettings(settingsOrOptions = {}) {
+ if (settingsOrOptions instanceof settings_1.default) {
+ return settingsOrOptions;
+ }
+ return new settings_1.default(settingsOrOptions);
}
-const createTokenAuth = function createTokenAuth(token) {
- if (!token) {
- throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
- }
- if (typeof token !== "string") {
- throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
- }
+/***/ }),
- token = token.replace(/^(token|bearer) +/i, "");
- return Object.assign(auth.bind(null, token), {
- hook: hook.bind(null, token)
- });
-};
+/***/ 4507:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
-exports.createTokenAuth = createTokenAuth;
-//# sourceMappingURL=index.js.map
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fsStat = __webpack_require__(109);
+const rpl = __webpack_require__(5288);
+const constants_1 = __webpack_require__(8838);
+const utils = __webpack_require__(6297);
+function read(directory, settings, callback) {
+ if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
+ return readdirWithFileTypes(directory, settings, callback);
+ }
+ return readdir(directory, settings, callback);
+}
+exports.read = read;
+function readdirWithFileTypes(directory, settings, callback) {
+ settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
+ if (readdirError !== null) {
+ return callFailureCallback(callback, readdirError);
+ }
+ const entries = dirents.map((dirent) => ({
+ dirent,
+ name: dirent.name,
+ path: `${directory}${settings.pathSegmentSeparator}${dirent.name}`
+ }));
+ if (!settings.followSymbolicLinks) {
+ return callSuccessCallback(callback, entries);
+ }
+ const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
+ rpl(tasks, (rplError, rplEntries) => {
+ if (rplError !== null) {
+ return callFailureCallback(callback, rplError);
+ }
+ callSuccessCallback(callback, rplEntries);
+ });
+ });
+}
+exports.readdirWithFileTypes = readdirWithFileTypes;
+function makeRplTaskEntry(entry, settings) {
+ return (done) => {
+ if (!entry.dirent.isSymbolicLink()) {
+ return done(null, entry);
+ }
+ settings.fs.stat(entry.path, (statError, stats) => {
+ if (statError !== null) {
+ if (settings.throwErrorOnBrokenSymbolicLink) {
+ return done(statError);
+ }
+ return done(null, entry);
+ }
+ entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
+ return done(null, entry);
+ });
+ };
+}
+function readdir(directory, settings, callback) {
+ settings.fs.readdir(directory, (readdirError, names) => {
+ if (readdirError !== null) {
+ return callFailureCallback(callback, readdirError);
+ }
+ const filepaths = names.map((name) => `${directory}${settings.pathSegmentSeparator}${name}`);
+ const tasks = filepaths.map((filepath) => {
+ return (done) => fsStat.stat(filepath, settings.fsStatSettings, done);
+ });
+ rpl(tasks, (rplError, results) => {
+ if (rplError !== null) {
+ return callFailureCallback(callback, rplError);
+ }
+ const entries = [];
+ names.forEach((name, index) => {
+ const stats = results[index];
+ const entry = {
+ name,
+ path: filepaths[index],
+ dirent: utils.fs.createDirentFromStats(name, stats)
+ };
+ if (settings.stats) {
+ entry.stats = stats;
+ }
+ entries.push(entry);
+ });
+ callSuccessCallback(callback, entries);
+ });
+ });
+}
+exports.readdir = readdir;
+function callFailureCallback(callback, error) {
+ callback(error);
+}
+function callSuccessCallback(callback, result) {
+ callback(null, result);
+}
/***/ }),
-/***/ 6762:
+/***/ 9560:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
-
Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fsStat = __webpack_require__(109);
+const constants_1 = __webpack_require__(8838);
+const utils = __webpack_require__(6297);
+function read(directory, settings) {
+ if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
+ return readdirWithFileTypes(directory, settings);
+ }
+ return readdir(directory, settings);
+}
+exports.read = read;
+function readdirWithFileTypes(directory, settings) {
+ const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
+ return dirents.map((dirent) => {
+ const entry = {
+ dirent,
+ name: dirent.name,
+ path: `${directory}${settings.pathSegmentSeparator}${dirent.name}`
+ };
+ if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
+ try {
+ const stats = settings.fs.statSync(entry.path);
+ entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
+ }
+ catch (error) {
+ if (settings.throwErrorOnBrokenSymbolicLink) {
+ throw error;
+ }
+ }
+ }
+ return entry;
+ });
+}
+exports.readdirWithFileTypes = readdirWithFileTypes;
+function readdir(directory, settings) {
+ const names = settings.fs.readdirSync(directory);
+ return names.map((name) => {
+ const entryPath = `${directory}${settings.pathSegmentSeparator}${name}`;
+ const stats = fsStat.statSync(entryPath, settings.fsStatSettings);
+ const entry = {
+ name,
+ path: entryPath,
+ dirent: utils.fs.createDirentFromStats(name, stats)
+ };
+ if (settings.stats) {
+ entry.stats = stats;
+ }
+ return entry;
+ });
+}
+exports.readdir = readdir;
-var universalUserAgent = __webpack_require__(5030);
-var beforeAfterHook = __webpack_require__(3682);
-var request = __webpack_require__(6234);
-var graphql = __webpack_require__(8467);
-var authToken = __webpack_require__(334);
-function _objectWithoutPropertiesLoose(source, excluded) {
- if (source == null) return {};
- var target = {};
- var sourceKeys = Object.keys(source);
- var key, i;
+/***/ }),
- for (i = 0; i < sourceKeys.length; i++) {
- key = sourceKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- target[key] = source[key];
- }
+/***/ 8662:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- return target;
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const path = __webpack_require__(5622);
+const fsStat = __webpack_require__(109);
+const fs = __webpack_require__(3803);
+class Settings {
+ constructor(_options = {}) {
+ this._options = _options;
+ this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
+ this.fs = fs.createFileSystemAdapter(this._options.fs);
+ this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
+ this.stats = this._getValue(this._options.stats, false);
+ this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
+ this.fsStatSettings = new fsStat.Settings({
+ followSymbolicLink: this.followSymbolicLinks,
+ fs: this.fs,
+ throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
+ });
+ }
+ _getValue(option, value) {
+ return option === undefined ? value : option;
+ }
}
+exports.default = Settings;
-function _objectWithoutProperties(source, excluded) {
- if (source == null) return {};
- var target = _objectWithoutPropertiesLoose(source, excluded);
+/***/ }),
- var key, i;
+/***/ 883:
+/***/ ((__unused_webpack_module, exports) => {
- if (Object.getOwnPropertySymbols) {
- var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
+"use strict";
- for (i = 0; i < sourceSymbolKeys.length; i++) {
- key = sourceSymbolKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
- target[key] = source[key];
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+class DirentFromStats {
+ constructor(name, stats) {
+ this.name = name;
+ this.isBlockDevice = stats.isBlockDevice.bind(stats);
+ this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
+ this.isDirectory = stats.isDirectory.bind(stats);
+ this.isFIFO = stats.isFIFO.bind(stats);
+ this.isFile = stats.isFile.bind(stats);
+ this.isSocket = stats.isSocket.bind(stats);
+ this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
- }
-
- return target;
}
+function createDirentFromStats(name, stats) {
+ return new DirentFromStats(name, stats);
+}
+exports.createDirentFromStats = createDirentFromStats;
-const VERSION = "3.2.1";
-class Octokit {
- constructor(options = {}) {
- const hook = new beforeAfterHook.Collection();
- const requestDefaults = {
- baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
- headers: {},
- request: Object.assign({}, options.request, {
- hook: hook.bind(null, "request")
- }),
- mediaType: {
- previews: [],
- format: ""
- }
- }; // prepend default user agent with `options.userAgent` if set
+/***/ }),
- requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
+/***/ 6297:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- if (options.baseUrl) {
- requestDefaults.baseUrl = options.baseUrl;
- }
+"use strict";
- if (options.previews) {
- requestDefaults.mediaType.previews = options.previews;
- }
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fs = __webpack_require__(883);
+exports.fs = fs;
- if (options.timeZone) {
- requestDefaults.headers["time-zone"] = options.timeZone;
- }
- this.request = request.request.defaults(requestDefaults);
- this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
- this.log = Object.assign({
- debug: () => {},
- info: () => {},
- warn: console.warn.bind(console),
- error: console.error.bind(console)
- }, options.log);
- this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
- // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
- // (2) If only `options.auth` is set, use the default token authentication strategy.
- // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
- // TODO: type `options.auth` based on `options.authStrategy`.
+/***/ }),
- if (!options.authStrategy) {
- if (!options.auth) {
- // (1)
- this.auth = async () => ({
- type: "unauthenticated"
- });
- } else {
- // (2)
- const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
-
- hook.wrap("request", auth.hook);
- this.auth = auth;
- }
- } else {
- const {
- authStrategy
- } = options,
- otherOptions = _objectWithoutProperties(options, ["authStrategy"]);
+/***/ 2987:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- const auth = authStrategy(Object.assign({
- request: this.request,
- log: this.log,
- // we pass the current octokit instance as well as its constructor options
- // to allow for authentication strategies that return a new octokit instance
- // that shares the same internal state as the current one. The original
- // requirement for this was the "event-octokit" authentication strategy
- // of https://github.com/probot/octokit-auth-probot.
- octokit: this,
- octokitOptions: otherOptions
- }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
+"use strict";
- hook.wrap("request", auth.hook);
- this.auth = auth;
- } // apply plugins
- // https://stackoverflow.com/a/16345172
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fs = __webpack_require__(5747);
+exports.FILE_SYSTEM_ADAPTER = {
+ lstat: fs.lstat,
+ stat: fs.stat,
+ lstatSync: fs.lstatSync,
+ statSync: fs.statSync
+};
+function createFileSystemAdapter(fsMethods) {
+ if (fsMethods === undefined) {
+ return exports.FILE_SYSTEM_ADAPTER;
+ }
+ return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
+}
+exports.createFileSystemAdapter = createFileSystemAdapter;
- const classConstructor = this.constructor;
- classConstructor.plugins.forEach(plugin => {
- Object.assign(this, plugin(this, options));
- });
- }
+/***/ }),
- static defaults(defaults) {
- const OctokitWithDefaults = class extends this {
- constructor(...args) {
- const options = args[0] || {};
+/***/ 109:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- if (typeof defaults === "function") {
- super(defaults(options));
- return;
- }
+"use strict";
- super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
- userAgent: `${options.userAgent} ${defaults.userAgent}`
- } : null));
- }
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const async = __webpack_require__(4147);
+const sync = __webpack_require__(4527);
+const settings_1 = __webpack_require__(2410);
+exports.Settings = settings_1.default;
+function stat(path, optionsOrSettingsOrCallback, callback) {
+ if (typeof optionsOrSettingsOrCallback === 'function') {
+ return async.read(path, getSettings(), optionsOrSettingsOrCallback);
+ }
+ async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
+}
+exports.stat = stat;
+function statSync(path, optionsOrSettings) {
+ const settings = getSettings(optionsOrSettings);
+ return sync.read(path, settings);
+}
+exports.statSync = statSync;
+function getSettings(settingsOrOptions = {}) {
+ if (settingsOrOptions instanceof settings_1.default) {
+ return settingsOrOptions;
+ }
+ return new settings_1.default(settingsOrOptions);
+}
- };
- return OctokitWithDefaults;
- }
- /**
- * Attach a plugin (or many) to your Octokit instance.
- *
- * @example
- * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
- */
+/***/ }),
- static plugin(...newPlugins) {
- var _a;
+/***/ 4147:
+/***/ ((__unused_webpack_module, exports) => {
- const currentPlugins = this.plugins;
- const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
- return NewOctokit;
- }
+"use strict";
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function read(path, settings, callback) {
+ settings.fs.lstat(path, (lstatError, lstat) => {
+ if (lstatError !== null) {
+ return callFailureCallback(callback, lstatError);
+ }
+ if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
+ return callSuccessCallback(callback, lstat);
+ }
+ settings.fs.stat(path, (statError, stat) => {
+ if (statError !== null) {
+ if (settings.throwErrorOnBrokenSymbolicLink) {
+ return callFailureCallback(callback, statError);
+ }
+ return callSuccessCallback(callback, lstat);
+ }
+ if (settings.markSymbolicLink) {
+ stat.isSymbolicLink = () => true;
+ }
+ callSuccessCallback(callback, stat);
+ });
+ });
+}
+exports.read = read;
+function callFailureCallback(callback, error) {
+ callback(error);
+}
+function callSuccessCallback(callback, result) {
+ callback(null, result);
}
-Octokit.VERSION = VERSION;
-Octokit.plugins = [];
-
-exports.Octokit = Octokit;
-//# sourceMappingURL=index.js.map
/***/ }),
-/***/ 9440:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+/***/ 4527:
+/***/ ((__unused_webpack_module, exports) => {
"use strict";
-
Object.defineProperty(exports, "__esModule", ({ value: true }));
+function read(path, settings) {
+ const lstat = settings.fs.lstatSync(path);
+ if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
+ return lstat;
+ }
+ try {
+ const stat = settings.fs.statSync(path);
+ if (settings.markSymbolicLink) {
+ stat.isSymbolicLink = () => true;
+ }
+ return stat;
+ }
+ catch (error) {
+ if (!settings.throwErrorOnBrokenSymbolicLink) {
+ return lstat;
+ }
+ throw error;
+ }
+}
+exports.read = read;
-var isPlainObject = __webpack_require__(558);
-var universalUserAgent = __webpack_require__(5030);
-function lowercaseKeys(object) {
- if (!object) {
- return {};
- }
+/***/ }),
- return Object.keys(object).reduce((newObj, key) => {
- newObj[key.toLowerCase()] = object[key];
- return newObj;
- }, {});
-}
+/***/ 2410:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
-function mergeDeep(defaults, options) {
- const result = Object.assign({}, defaults);
- Object.keys(options).forEach(key => {
- if (isPlainObject.isPlainObject(options[key])) {
- if (!(key in defaults)) Object.assign(result, {
- [key]: options[key]
- });else result[key] = mergeDeep(defaults[key], options[key]);
- } else {
- Object.assign(result, {
- [key]: options[key]
- });
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fs = __webpack_require__(2987);
+class Settings {
+ constructor(_options = {}) {
+ this._options = _options;
+ this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
+ this.fs = fs.createFileSystemAdapter(this._options.fs);
+ this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
+ this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
+ }
+ _getValue(option, value) {
+ return option === undefined ? value : option;
}
- });
- return result;
}
+exports.default = Settings;
-function removeUndefinedProperties(obj) {
- for (const key in obj) {
- if (obj[key] === undefined) {
- delete obj[key];
- }
- }
- return obj;
-}
+/***/ }),
-function merge(defaults, route, options) {
- if (typeof route === "string") {
- let [method, url] = route.split(" ");
- options = Object.assign(url ? {
- method,
- url
- } : {
- url: method
- }, options);
- } else {
- options = Object.assign({}, route);
- } // lowercase header names before merging with defaults to avoid duplicates
+/***/ 6026:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+"use strict";
- options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const async_1 = __webpack_require__(7523);
+const stream_1 = __webpack_require__(6737);
+const sync_1 = __webpack_require__(3068);
+const settings_1 = __webpack_require__(141);
+exports.Settings = settings_1.default;
+function walk(directory, optionsOrSettingsOrCallback, callback) {
+ if (typeof optionsOrSettingsOrCallback === 'function') {
+ return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);
+ }
+ new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);
+}
+exports.walk = walk;
+function walkSync(directory, optionsOrSettings) {
+ const settings = getSettings(optionsOrSettings);
+ const provider = new sync_1.default(directory, settings);
+ return provider.read();
+}
+exports.walkSync = walkSync;
+function walkStream(directory, optionsOrSettings) {
+ const settings = getSettings(optionsOrSettings);
+ const provider = new stream_1.default(directory, settings);
+ return provider.read();
+}
+exports.walkStream = walkStream;
+function getSettings(settingsOrOptions = {}) {
+ if (settingsOrOptions instanceof settings_1.default) {
+ return settingsOrOptions;
+ }
+ return new settings_1.default(settingsOrOptions);
+}
- removeUndefinedProperties(options);
- removeUndefinedProperties(options.headers);
- const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
- if (defaults && defaults.mediaType.previews.length) {
- mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
- }
+/***/ }),
- mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
- return mergedOptions;
+/***/ 7523:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const async_1 = __webpack_require__(5732);
+class AsyncProvider {
+ constructor(_root, _settings) {
+ this._root = _root;
+ this._settings = _settings;
+ this._reader = new async_1.default(this._root, this._settings);
+ this._storage = new Set();
+ }
+ read(callback) {
+ this._reader.onError((error) => {
+ callFailureCallback(callback, error);
+ });
+ this._reader.onEntry((entry) => {
+ this._storage.add(entry);
+ });
+ this._reader.onEnd(() => {
+ callSuccessCallback(callback, [...this._storage]);
+ });
+ this._reader.read();
+ }
+}
+exports.default = AsyncProvider;
+function callFailureCallback(callback, error) {
+ callback(error);
+}
+function callSuccessCallback(callback, entries) {
+ callback(null, entries);
}
-function addQueryParameters(url, parameters) {
- const separator = /\?/.test(url) ? "&" : "?";
- const names = Object.keys(parameters);
- if (names.length === 0) {
- return url;
- }
+/***/ }),
- return url + separator + names.map(name => {
- if (name === "q") {
- return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
- }
+/***/ 6737:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- return `${name}=${encodeURIComponent(parameters[name])}`;
- }).join("&");
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __webpack_require__(2413);
+const async_1 = __webpack_require__(5732);
+class StreamProvider {
+ constructor(_root, _settings) {
+ this._root = _root;
+ this._settings = _settings;
+ this._reader = new async_1.default(this._root, this._settings);
+ this._stream = new stream_1.Readable({
+ objectMode: true,
+ read: () => { },
+ destroy: this._reader.destroy.bind(this._reader)
+ });
+ }
+ read() {
+ this._reader.onError((error) => {
+ this._stream.emit('error', error);
+ });
+ this._reader.onEntry((entry) => {
+ this._stream.push(entry);
+ });
+ this._reader.onEnd(() => {
+ this._stream.push(null);
+ });
+ this._reader.read();
+ return this._stream;
+ }
}
+exports.default = StreamProvider;
-const urlVariableRegex = /\{[^}]+\}/g;
-function removeNonChars(variableName) {
- return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
-}
+/***/ }),
-function extractUrlVariableNames(url) {
- const matches = url.match(urlVariableRegex);
+/***/ 3068:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- if (!matches) {
- return [];
- }
+"use strict";
- return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const sync_1 = __webpack_require__(3595);
+class SyncProvider {
+ constructor(_root, _settings) {
+ this._root = _root;
+ this._settings = _settings;
+ this._reader = new sync_1.default(this._root, this._settings);
+ }
+ read() {
+ return this._reader.read();
+ }
}
+exports.default = SyncProvider;
-function omit(object, keysToOmit) {
- return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
- obj[key] = object[key];
- return obj;
- }, {});
-}
-// Based on https://github.com/bramstein/url-template, licensed under BSD
-// TODO: create separate package.
-//
-// Copyright (c) 2012-2014, Bram Stein
-// All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-// 1. Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// 2. Redistributions in binary form must reproduce the above copyright
-// notice, this list of conditions and the following disclaimer in the
-// documentation and/or other materials provided with the distribution.
-// 3. The name of the author may not be used to endorse or promote products
-// derived from this software without specific prior written permission.
-// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
-// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
-// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
-// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+/***/ }),
-/* istanbul ignore file */
-function encodeReserved(str) {
- return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
- if (!/%[0-9A-Fa-f]/.test(part)) {
- part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
- }
+/***/ 5732:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- return part;
- }).join("");
-}
+"use strict";
-function encodeUnreserved(str) {
- return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
- return "%" + c.charCodeAt(0).toString(16).toUpperCase();
- });
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const events_1 = __webpack_require__(8614);
+const fsScandir = __webpack_require__(5667);
+const fastq = __webpack_require__(7340);
+const common = __webpack_require__(7988);
+const reader_1 = __webpack_require__(8311);
+class AsyncReader extends reader_1.default {
+ constructor(_root, _settings) {
+ super(_root, _settings);
+ this._settings = _settings;
+ this._scandir = fsScandir.scandir;
+ this._emitter = new events_1.EventEmitter();
+ this._queue = fastq(this._worker.bind(this), this._settings.concurrency);
+ this._isFatalError = false;
+ this._isDestroyed = false;
+ this._queue.drain = () => {
+ if (!this._isFatalError) {
+ this._emitter.emit('end');
+ }
+ };
+ }
+ read() {
+ this._isFatalError = false;
+ this._isDestroyed = false;
+ setImmediate(() => {
+ this._pushToQueue(this._root, this._settings.basePath);
+ });
+ return this._emitter;
+ }
+ destroy() {
+ if (this._isDestroyed) {
+ throw new Error('The reader is already destroyed');
+ }
+ this._isDestroyed = true;
+ this._queue.killAndDrain();
+ }
+ onEntry(callback) {
+ this._emitter.on('entry', callback);
+ }
+ onError(callback) {
+ this._emitter.once('error', callback);
+ }
+ onEnd(callback) {
+ this._emitter.once('end', callback);
+ }
+ _pushToQueue(directory, base) {
+ const queueItem = { directory, base };
+ this._queue.push(queueItem, (error) => {
+ if (error !== null) {
+ this._handleError(error);
+ }
+ });
+ }
+ _worker(item, done) {
+ this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {
+ if (error !== null) {
+ return done(error, undefined);
+ }
+ for (const entry of entries) {
+ this._handleEntry(entry, item.base);
+ }
+ done(null, undefined);
+ });
+ }
+ _handleError(error) {
+ if (!common.isFatalError(this._settings, error)) {
+ return;
+ }
+ this._isFatalError = true;
+ this._isDestroyed = true;
+ this._emitter.emit('error', error);
+ }
+ _handleEntry(entry, base) {
+ if (this._isDestroyed || this._isFatalError) {
+ return;
+ }
+ const fullpath = entry.path;
+ if (base !== undefined) {
+ entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
+ }
+ if (common.isAppliedFilter(this._settings.entryFilter, entry)) {
+ this._emitEntry(entry);
+ }
+ if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {
+ this._pushToQueue(fullpath, entry.path);
+ }
+ }
+ _emitEntry(entry) {
+ this._emitter.emit('entry', entry);
+ }
}
+exports.default = AsyncReader;
-function encodeValue(operator, value, key) {
- value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
- if (key) {
- return encodeUnreserved(key) + "=" + value;
- } else {
- return value;
- }
-}
+/***/ }),
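A minimal consumption sketch for the AsyncReader bundled above (an internal of @nodelib/fs.walk); `root` and `settings` are assumed inputs here, with `settings` shaped like the Settings class that appears later in this bundle:

const reader = new AsyncReader(root, settings);                    // root: directory to walk, settings: fs.walk Settings
reader.onEntry((entry) => console.log("entry:", entry.path));      // every entry accepted by entryFilter
reader.onError((error) => console.error("walk failed:", error));   // only errors that isFatalError accepts are emitted
reader.onEnd(() => console.log("walk finished"));                  // fired when the fastq queue drains without a fatal error
reader.read();                                                     // schedules the root directory and returns the EventEmitter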
-function isDefined(value) {
- return value !== undefined && value !== null;
-}
+/***/ 7988:
+/***/ ((__unused_webpack_module, exports) => {
-function isKeyOperator(operator) {
- return operator === ";" || operator === "&" || operator === "?";
-}
+"use strict";
-function getValues(context, operator, key, modifier) {
- var value = context[key],
- result = [];
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function isFatalError(settings, error) {
+ if (settings.errorFilter === null) {
+ return true;
+ }
+ return !settings.errorFilter(error);
+}
+exports.isFatalError = isFatalError;
+function isAppliedFilter(filter, value) {
+ return filter === null || filter(value);
+}
+exports.isAppliedFilter = isAppliedFilter;
+function replacePathSegmentSeparator(filepath, separator) {
+ return filepath.split(/[\\/]/).join(separator);
+}
+exports.replacePathSegmentSeparator = replacePathSegmentSeparator;
+function joinPathSegments(a, b, separator) {
+ if (a === '') {
+ return b;
+ }
+ return a + separator + b;
+}
+exports.joinPathSegments = joinPathSegments;
- if (isDefined(value) && value !== "") {
- if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
- value = value.toString();
- if (modifier && modifier !== "*") {
- value = value.substring(0, parseInt(modifier, 10));
- }
+/***/ }),
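The helpers above drive both readers' filtering and path handling; tracing the definitions gives, for example:

isFatalError({ errorFilter: null }, new Error("EACCES"));         // true: without an errorFilter every error is fatal
isFatalError({ errorFilter: (e) => true }, new Error("EACCES"));  // false: filtered errors are swallowed by the readers
isAppliedFilter(null, { name: "file.txt" });                      // true: a missing filter accepts everything
replacePathSegmentSeparator("a\\b/c", "/");                       // "a/b/c"
joinPathSegments("", "file.txt", "/");                            // "file.txt": an empty base adds no separator
joinPathSegments("src", "file.txt", "/");                         // "src/file.txt"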
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
- } else {
- if (modifier === "*") {
- if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
- });
- } else {
- Object.keys(value).forEach(function (k) {
- if (isDefined(value[k])) {
- result.push(encodeValue(operator, value[k], k));
- }
- });
- }
- } else {
- const tmp = [];
+/***/ 8311:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- tmp.push(encodeValue(operator, value));
- });
- } else {
- Object.keys(value).forEach(function (k) {
- if (isDefined(value[k])) {
- tmp.push(encodeUnreserved(k));
- tmp.push(encodeValue(operator, value[k].toString()));
- }
- });
- }
+"use strict";
- if (isKeyOperator(operator)) {
- result.push(encodeUnreserved(key) + "=" + tmp.join(","));
- } else if (tmp.length !== 0) {
- result.push(tmp.join(","));
- }
- }
- }
- } else {
- if (operator === ";") {
- if (isDefined(value)) {
- result.push(encodeUnreserved(key));
- }
- } else if (value === "" && (operator === "&" || operator === "?")) {
- result.push(encodeUnreserved(key) + "=");
- } else if (value === "") {
- result.push("");
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const common = __webpack_require__(7988);
+class Reader {
+ constructor(_root, _settings) {
+ this._root = _root;
+ this._settings = _settings;
+ this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);
}
- }
-
- return result;
-}
-
-function parseUrl(template) {
- return {
- expand: expand.bind(null, template)
- };
}
+exports.default = Reader;
-function expand(template, context) {
- var operators = ["+", "#", ".", "/", ";", "?", "&"];
- return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
- if (expression) {
- let operator = "";
- const values = [];
- if (operators.indexOf(expression.charAt(0)) !== -1) {
- operator = expression.charAt(0);
- expression = expression.substr(1);
- }
+/***/ }),
- expression.split(/,/g).forEach(function (variable) {
- var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
- values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
- });
+/***/ 3595:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- if (operator && operator !== "+") {
- var separator = ",";
+"use strict";
- if (operator === "?") {
- separator = "&";
- } else if (operator !== "#") {
- separator = operator;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fsScandir = __webpack_require__(5667);
+const common = __webpack_require__(7988);
+const reader_1 = __webpack_require__(8311);
+class SyncReader extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._scandir = fsScandir.scandirSync;
+ this._storage = new Set();
+ this._queue = new Set();
+ }
+ read() {
+ this._pushToQueue(this._root, this._settings.basePath);
+ this._handleQueue();
+ return [...this._storage];
+ }
+ _pushToQueue(directory, base) {
+ this._queue.add({ directory, base });
+ }
+ _handleQueue() {
+ for (const item of this._queue.values()) {
+ this._handleDirectory(item.directory, item.base);
}
-
- return (values.length !== 0 ? operator : "") + values.join(separator);
- } else {
- return values.join(",");
- }
- } else {
- return encodeReserved(literal);
}
- });
+ _handleDirectory(directory, base) {
+ try {
+ const entries = this._scandir(directory, this._settings.fsScandirSettings);
+ for (const entry of entries) {
+ this._handleEntry(entry, base);
+ }
+ }
+ catch (error) {
+ this._handleError(error);
+ }
+ }
+ _handleError(error) {
+ if (!common.isFatalError(this._settings, error)) {
+ return;
+ }
+ throw error;
+ }
+ _handleEntry(entry, base) {
+ const fullpath = entry.path;
+ if (base !== undefined) {
+ entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
+ }
+ if (common.isAppliedFilter(this._settings.entryFilter, entry)) {
+ this._pushToStorage(entry);
+ }
+ if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {
+ this._pushToQueue(fullpath, entry.path);
+ }
+ }
+ _pushToStorage(entry) {
+ this._storage.add(entry);
+ }
}
+exports.default = SyncReader;
-function parse(options) {
- // https://fetch.spec.whatwg.org/#methods
- let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
-
- let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
- let headers = Object.assign({}, options.headers);
- let body;
- let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
- const urlVariableNames = extractUrlVariableNames(url);
- url = parseUrl(url).expand(parameters);
+/***/ }),
- if (!/^http/.test(url)) {
- url = options.baseUrl + url;
- }
+/***/ 141:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
- const remainingParameters = omit(parameters, omittedParameters);
- const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+"use strict";
- if (!isBinaryRequest) {
- if (options.mediaType.format) {
- // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
- headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const path = __webpack_require__(5622);
+const fsScandir = __webpack_require__(5667);
+class Settings {
+ constructor(_options = {}) {
+ this._options = _options;
+ this.basePath = this._getValue(this._options.basePath, undefined);
+ this.concurrency = this._getValue(this._options.concurrency, Infinity);
+ this.deepFilter = this._getValue(this._options.deepFilter, null);
+ this.entryFilter = this._getValue(this._options.entryFilter, null);
+ this.errorFilter = this._getValue(this._options.errorFilter, null);
+ this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
+ this.fsScandirSettings = new fsScandir.Settings({
+ followSymbolicLinks: this._options.followSymbolicLinks,
+ fs: this._options.fs,
+ pathSegmentSeparator: this._options.pathSegmentSeparator,
+ stats: this._options.stats,
+ throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink
+ });
}
-
- if (options.mediaType.previews.length) {
- const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
- headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
- const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
- return `application/vnd.github.${preview}-preview${format}`;
- }).join(",");
+ _getValue(option, value) {
+ return option === undefined ? value : option;
}
- } // for GET/HEAD requests, set URL query parameters from remaining parameters
- // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
+}
+exports.default = Settings;
- if (["GET", "HEAD"].includes(method)) {
- url = addQueryParameters(url, remainingParameters);
- } else {
- if ("data" in remainingParameters) {
- body = remainingParameters.data;
- } else {
- if (Object.keys(remainingParameters).length) {
- body = remainingParameters;
- } else {
- headers["content-length"] = 0;
- }
- }
- } // default content-type for JSON if body is set
+/***/ }),
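The Settings class above simply resolves each option against a default through `_getValue`; for instance:

const settings = new Settings({ concurrency: 4 });
settings.concurrency;          // 4 (explicitly provided)
settings.deepFilter;           // null (default)
settings.pathSegmentSeparator; // path.sep (default)
settings.fsScandirSettings;    // an fsScandir.Settings instance built from the same options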
+/***/ 334:
+/***/ ((__unused_webpack_module, exports) => {
- if (!headers["content-type"] && typeof body !== "undefined") {
- headers["content-type"] = "application/json; charset=utf-8";
- } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
- // fetch does not allow to set `content-length` header, but we can set body to an empty string
+"use strict";
- if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
- body = "";
- } // Only return body/request keys if present
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+async function auth(token) {
+ const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth";
+ return {
+ type: "token",
+ token: token,
+ tokenType
+ };
+}
- return Object.assign({
- method,
- url,
- headers
- }, typeof body !== "undefined" ? {
- body
- } : null, options.request ? {
- request: options.request
- } : null);
-}
+/**
+ * Prefix token for usage in the Authorization header
+ *
+ * @param token OAuth token or JSON Web Token
+ */
+function withAuthorizationPrefix(token) {
+ if (token.split(/\./).length === 3) {
+ return `bearer ${token}`;
+ }
-function endpointWithDefaults(defaults, route, options) {
- return parse(merge(defaults, route, options));
+ return `token ${token}`;
}
-function withDefaults(oldDefaults, newDefaults) {
- const DEFAULTS = merge(oldDefaults, newDefaults);
- const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
- return Object.assign(endpoint, {
- DEFAULTS,
- defaults: withDefaults.bind(null, DEFAULTS),
- merge: merge.bind(null, DEFAULTS),
- parse
- });
+async function hook(token, request, route, parameters) {
+ const endpoint = request.endpoint.merge(route, parameters);
+ endpoint.headers.authorization = withAuthorizationPrefix(token);
+ return request(endpoint);
}
-const VERSION = "6.0.9";
-
-const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
-// So we use RequestParameters and add method as additional required property.
+const createTokenAuth = function createTokenAuth(token) {
+ if (!token) {
+ throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+ }
-const DEFAULTS = {
- method: "GET",
- baseUrl: "https://api.github.com",
- headers: {
- accept: "application/vnd.github.v3+json",
- "user-agent": userAgent
- },
- mediaType: {
- format: "",
- previews: []
+ if (typeof token !== "string") {
+ throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
}
-};
-const endpoint = withDefaults(null, DEFAULTS);
+ token = token.replace(/^(token|bearer) +/i, "");
+ return Object.assign(auth.bind(null, token), {
+ hook: hook.bind(null, token)
+ });
+};
-exports.endpoint = endpoint;
+exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ }),
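A usage sketch for the token strategy above (published as @octokit/auth-token), assuming a hypothetical personal access token; the strategy is itself a function that resolves to the authentication object and carries a `hook` for request wrapping:

const { createTokenAuth } = require("@octokit/auth-token");

(async () => {
  const auth = createTokenAuth("ghp_XXXXXXXX"); // hypothetical token, normally read from a secret
  const authentication = await auth();
  // -> { type: "token", token: "ghp_XXXXXXXX", tokenType: "oauth" }
  // A JWT (three dot-separated segments) is detected as "app", a "v1."-prefixed token as "installation".
  // `auth.hook` is what Octokit wraps around requests to inject `Authorization: token …` / `bearer …`.
})();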
-/***/ 558:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 6762:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-/*!
- * is-plain-object
- *
- * Copyright (c) 2014-2017, Jon Schlinkert.
- * Released under the MIT License.
- */
-
-function isObject(o) {
- return Object.prototype.toString.call(o) === '[object Object]';
-}
-
-function isPlainObject(o) {
- var ctor,prot;
-
- if (isObject(o) === false) return false;
-
- // If has modified constructor
- ctor = o.constructor;
- if (ctor === undefined) return true;
+var universalUserAgent = __webpack_require__(5030);
+var beforeAfterHook = __webpack_require__(3682);
+var request = __webpack_require__(6234);
+var graphql = __webpack_require__(8467);
+var authToken = __webpack_require__(334);
- // If has modified prototype
- prot = ctor.prototype;
- if (isObject(prot) === false) return false;
+function _objectWithoutPropertiesLoose(source, excluded) {
+ if (source == null) return {};
+ var target = {};
+ var sourceKeys = Object.keys(source);
+ var key, i;
- // If constructor does not have an Object-specific method
- if (prot.hasOwnProperty('isPrototypeOf') === false) {
- return false;
+ for (i = 0; i < sourceKeys.length; i++) {
+ key = sourceKeys[i];
+ if (excluded.indexOf(key) >= 0) continue;
+ target[key] = source[key];
}
- // Most likely a plain Object
- return true;
+ return target;
}
-exports.isPlainObject = isPlainObject;
+function _objectWithoutProperties(source, excluded) {
+ if (source == null) return {};
+ var target = _objectWithoutPropertiesLoose(source, excluded);
-/***/ }),
+ var key, i;
-/***/ 8467:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+ if (Object.getOwnPropertySymbols) {
+ var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
-"use strict";
+ for (i = 0; i < sourceSymbolKeys.length; i++) {
+ key = sourceSymbolKeys[i];
+ if (excluded.indexOf(key) >= 0) continue;
+ if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
+ target[key] = source[key];
+ }
+ }
+ return target;
+}
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+const VERSION = "3.2.1";
-var request = __webpack_require__(6234);
-var universalUserAgent = __webpack_require__(5030);
+class Octokit {
+ constructor(options = {}) {
+ const hook = new beforeAfterHook.Collection();
+ const requestDefaults = {
+ baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
+ headers: {},
+ request: Object.assign({}, options.request, {
+ hook: hook.bind(null, "request")
+ }),
+ mediaType: {
+ previews: [],
+ format: ""
+ }
+ }; // prepend default user agent with `options.userAgent` if set
-const VERSION = "4.5.7";
+ requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
-class GraphqlError extends Error {
- constructor(request, response) {
- const message = response.data.errors[0].message;
- super(message);
- Object.assign(this, response.data);
- Object.assign(this, {
- headers: response.headers
- });
- this.name = "GraphqlError";
- this.request = request; // Maintains proper stack trace (only available on V8)
+ if (options.baseUrl) {
+ requestDefaults.baseUrl = options.baseUrl;
+ }
- /* istanbul ignore next */
+ if (options.previews) {
+ requestDefaults.mediaType.previews = options.previews;
+ }
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
+ if (options.timeZone) {
+ requestDefaults.headers["time-zone"] = options.timeZone;
}
- }
-}
+ this.request = request.request.defaults(requestDefaults);
+ this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
+ this.log = Object.assign({
+ debug: () => {},
+ info: () => {},
+ warn: console.warn.bind(console),
+ error: console.error.bind(console)
+ }, options.log);
+ this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
+ // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
+ // (2) If only `options.auth` is set, use the default token authentication strategy.
+ // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
+ // TODO: type `options.auth` based on `options.authStrategy`.
-const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
-const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
-function graphql(request, query, options) {
- if (typeof query === "string" && options && "query" in options) {
- return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
- }
+ if (!options.authStrategy) {
+ if (!options.auth) {
+ // (1)
+ this.auth = async () => ({
+ type: "unauthenticated"
+ });
+ } else {
+ // (2)
+ const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
- const parsedOptions = typeof query === "string" ? Object.assign({
- query
- }, options) : query;
- const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
- if (NON_VARIABLE_OPTIONS.includes(key)) {
- result[key] = parsedOptions[key];
- return result;
- }
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ }
+ } else {
+ const {
+ authStrategy
+ } = options,
+ otherOptions = _objectWithoutProperties(options, ["authStrategy"]);
- if (!result.variables) {
- result.variables = {};
- }
+ const auth = authStrategy(Object.assign({
+ request: this.request,
+ log: this.log,
+ // we pass the current octokit instance as well as its constructor options
+ // to allow for authentication strategies that return a new octokit instance
+ // that shares the same internal state as the current one. The original
+ // requirement for this was the "event-octokit" authentication strategy
+ // of https://github.com/probot/octokit-auth-probot.
+ octokit: this,
+ octokitOptions: otherOptions
+ }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
- result.variables[key] = parsedOptions[key];
- return result;
- }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
- // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ } // apply plugins
+ // https://stackoverflow.com/a/16345172
- const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
- if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
- requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
+ const classConstructor = this.constructor;
+ classConstructor.plugins.forEach(plugin => {
+ Object.assign(this, plugin(this, options));
+ });
}
- return request(requestOptions).then(response => {
- if (response.data.errors) {
- const headers = {};
-
- for (const key of Object.keys(response.headers)) {
- headers[key] = response.headers[key];
- }
+ static defaults(defaults) {
+ const OctokitWithDefaults = class extends this {
+ constructor(...args) {
+ const options = args[0] || {};
- throw new GraphqlError(requestOptions, {
- headers,
- data: response.data
- });
- }
+ if (typeof defaults === "function") {
+ super(defaults(options));
+ return;
+ }
- return response.data.data;
- });
-}
+ super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
+ userAgent: `${options.userAgent} ${defaults.userAgent}`
+ } : null));
+ }
-function withDefaults(request$1, newDefaults) {
- const newRequest = request$1.defaults(newDefaults);
+ };
+ return OctokitWithDefaults;
+ }
+ /**
+ * Attach a plugin (or many) to your Octokit instance.
+ *
+ * @example
+ * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
+ */
- const newApi = (query, options) => {
- return graphql(newRequest, query, options);
- };
- return Object.assign(newApi, {
- defaults: withDefaults.bind(null, newRequest),
- endpoint: request.request.endpoint
- });
-}
+ static plugin(...newPlugins) {
+ var _a;
+
+ const currentPlugins = this.plugins;
+ const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
+ return NewOctokit;
+ }
-const graphql$1 = withDefaults(request.request, {
- headers: {
- "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
- },
- method: "POST",
- url: "/graphql"
-});
-function withCustomRequest(customRequest) {
- return withDefaults(customRequest, {
- method: "POST",
- url: "/graphql"
- });
}
+Octokit.VERSION = VERSION;
+Octokit.plugins = [];
-exports.graphql = graphql$1;
-exports.withCustomRequest = withCustomRequest;
+exports.Octokit = Octokit;
//# sourceMappingURL=index.js.map
/***/ }),
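The constructor above distinguishes three authentication setups, as its inline comments (1)–(3) describe; a short sketch against the published @octokit/core API, with a hypothetical token and plugin:

const { Octokit } = require("@octokit/core");

const anonymous = new Octokit();                          // (1) no auth: octokit.auth() resolves to { type: "unauthenticated" }
const withToken = new Octokit({ auth: "ghp_XXXXXXXX" });  // (2) token string: the createTokenAuth hook from module 334 signs every request
// (3) a custom strategy receives `auth` as its options, e.g. (hypothetically) an app strategy:
// const app = new Octokit({ authStrategy: createAppAuth, auth: { appId: 1, privateKey } });

const logRequests = (octokit, options) => ({ requestCount: 0 }); // hypothetical plugin: returned properties are mixed into each instance
const MyOctokit = Octokit.plugin(logRequests).defaults({ userAgent: "my-app/v1.2.3" });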
-/***/ 4193:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 9440:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const VERSION = "2.6.0";
-
-/**
- * Some “list” response that can be paginated have a different response structure
- *
- * They have a `total_count` key in the response (search also has `incomplete_results`,
- * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
- *
- * Octokit normalizes these responses so that paginated results are always returned following
- * the same structure. One challenge is that if the list response has only one page, no Link
- * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
- */
-function normalizePaginatedListResponse(response) {
- const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
- if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
- // to retrieve the same information.
-
- const incompleteResults = response.data.incomplete_results;
- const repositorySelection = response.data.repository_selection;
- const totalCount = response.data.total_count;
- delete response.data.incomplete_results;
- delete response.data.repository_selection;
- delete response.data.total_count;
- const namespaceKey = Object.keys(response.data)[0];
- const data = response.data[namespaceKey];
- response.data = data;
-
- if (typeof incompleteResults !== "undefined") {
- response.data.incomplete_results = incompleteResults;
- }
+var isPlainObject = __webpack_require__(558);
+var universalUserAgent = __webpack_require__(5030);
- if (typeof repositorySelection !== "undefined") {
- response.data.repository_selection = repositorySelection;
+function lowercaseKeys(object) {
+ if (!object) {
+ return {};
}
- response.data.total_count = totalCount;
- return response;
+ return Object.keys(object).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = object[key];
+ return newObj;
+ }, {});
}
-function iterator(octokit, route, parameters) {
- const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
- const requestMethod = typeof route === "function" ? route : octokit.request;
- const method = options.method;
- const headers = options.headers;
- let url = options.url;
- return {
- [Symbol.asyncIterator]: () => ({
- async next() {
- if (!url) return {
- done: true
- };
- const response = await requestMethod({
- method,
- url,
- headers
- });
- const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
- // '; rel="next", ; rel="last"'
- // sets `url` to undefined if "next" URL is not present or `link` header is not set
-
- url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
- return {
- value: normalizedResponse
- };
- }
-
- })
- };
+function mergeDeep(defaults, options) {
+ const result = Object.assign({}, defaults);
+ Object.keys(options).forEach(key => {
+ if (isPlainObject.isPlainObject(options[key])) {
+ if (!(key in defaults)) Object.assign(result, {
+ [key]: options[key]
+ });else result[key] = mergeDeep(defaults[key], options[key]);
+ } else {
+ Object.assign(result, {
+ [key]: options[key]
+ });
+ }
+ });
+ return result;
}
-function paginate(octokit, route, parameters, mapFn) {
- if (typeof parameters === "function") {
- mapFn = parameters;
- parameters = undefined;
+function removeUndefinedProperties(obj) {
+ for (const key in obj) {
+ if (obj[key] === undefined) {
+ delete obj[key];
+ }
}
- return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
+ return obj;
}
-function gather(octokit, results, iterator, mapFn) {
- return iterator.next().then(result => {
- if (result.done) {
- return results;
- }
+function merge(defaults, route, options) {
+ if (typeof route === "string") {
+ let [method, url] = route.split(" ");
+ options = Object.assign(url ? {
+ method,
+ url
+ } : {
+ url: method
+ }, options);
+ } else {
+ options = Object.assign({}, route);
+ } // lowercase header names before merging with defaults to avoid duplicates
- let earlyExit = false;
- function done() {
- earlyExit = true;
- }
+ options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
- results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
+ removeUndefinedProperties(options);
+ removeUndefinedProperties(options.headers);
+ const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
- if (earlyExit) {
- return results;
- }
+ if (defaults && defaults.mediaType.previews.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
+ }
- return gather(octokit, results, iterator, mapFn);
- });
+ mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
+ return mergedOptions;
}
-const composePaginateRest = Object.assign(paginate, {
- iterator
-});
+function addQueryParameters(url, parameters) {
+ const separator = /\?/.test(url) ? "&" : "?";
+ const names = Object.keys(parameters);
-/**
- * @param octokit Octokit instance
- * @param options Options passed to Octokit constructor
- */
+ if (names.length === 0) {
+ return url;
+ }
-function paginateRest(octokit) {
- return {
- paginate: Object.assign(paginate.bind(null, octokit), {
- iterator: iterator.bind(null, octokit)
- })
- };
-}
-paginateRest.VERSION = VERSION;
+ return url + separator + names.map(name => {
+ if (name === "q") {
+ return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+ }
-exports.composePaginateRest = composePaginateRest;
-exports.paginateRest = paginateRest;
-//# sourceMappingURL=index.js.map
+ return `${name}=${encodeURIComponent(parameters[name])}`;
+ }).join("&");
+}
+const urlVariableRegex = /\{[^}]+\}/g;
-/***/ }),
+function removeNonChars(variableName) {
+ return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
-/***/ 3044:
-/***/ ((__unused_webpack_module, exports) => {
+function extractUrlVariableNames(url) {
+ const matches = url.match(urlVariableRegex);
-"use strict";
+ if (!matches) {
+ return [];
+ }
+ return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+}
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+function omit(object, keysToOmit) {
+ return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
+ obj[key] = object[key];
+ return obj;
+ }, {});
+}
-const Endpoints = {
- actions: {
- addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
- createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
- createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
- createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
- createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
- createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
- deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
- deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
- deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
- deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
- deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
- downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
- downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
- downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
- getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
- getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
- getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
- getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
- getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
- getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
- getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
- getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
- getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
- getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
- getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
- listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
- listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
- listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
- listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
- listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
- listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
- listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
- listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
- listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
- listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
- listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
- listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
- listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
- reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
- removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"]
- },
- activity: {
- checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
- deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
- deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
- getFeeds: ["GET /feeds"],
- getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
- getThread: ["GET /notifications/threads/{thread_id}"],
- getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
- listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
- listNotificationsForAuthenticatedUser: ["GET /notifications"],
- listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
- listPublicEvents: ["GET /events"],
- listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
- listPublicEventsForUser: ["GET /users/{username}/events/public"],
- listPublicOrgEvents: ["GET /orgs/{org}/events"],
- listReceivedEventsForUser: ["GET /users/{username}/received_events"],
- listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
- listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
- listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
- listReposStarredByAuthenticatedUser: ["GET /user/starred"],
- listReposStarredByUser: ["GET /users/{username}/starred"],
- listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
- listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
- listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
- listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
- markNotificationsAsRead: ["PUT /notifications"],
- markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
- markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
- setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
- setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
- starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
- unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
- },
- apps: {
- addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
- checkToken: ["POST /applications/{client_id}/token"],
- createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
- mediaType: {
- previews: ["corsair"]
- }
- }],
- createFromManifest: ["POST /app-manifests/{code}/conversions"],
- createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
- deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
- deleteInstallation: ["DELETE /app/installations/{installation_id}"],
- deleteToken: ["DELETE /applications/{client_id}/token"],
- getAuthenticated: ["GET /app"],
- getBySlug: ["GET /apps/{app_slug}"],
- getInstallation: ["GET /app/installations/{installation_id}"],
- getOrgInstallation: ["GET /orgs/{org}/installation"],
- getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
- getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
- getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
- getUserInstallation: ["GET /users/{username}/installation"],
- listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
- listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
- listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
- listInstallations: ["GET /app/installations"],
- listInstallationsForAuthenticatedUser: ["GET /user/installations"],
- listPlans: ["GET /marketplace_listing/plans"],
- listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
- listReposAccessibleToInstallation: ["GET /installation/repositories"],
- listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
- listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
- removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
- resetToken: ["PATCH /applications/{client_id}/token"],
- revokeInstallationAccessToken: ["DELETE /installation/token"],
- suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
- unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"]
- },
- billing: {
- getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
- getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
- getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
- getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
- getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
- getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
- },
- checks: {
- create: ["POST /repos/{owner}/{repo}/check-runs", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- createSuite: ["POST /repos/{owner}/{repo}/check-suites", {
- mediaType: {
- previews: ["antiope"]
+// Based on https://github.com/bramstein/url-template, licensed under BSD
+// TODO: create separate package.
+//
+// Copyright (c) 2012-2014, Bram Stein
+// All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+// 1. Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// 3. The name of the author may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
+// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/* istanbul ignore file */
+function encodeReserved(str) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
+ if (!/%[0-9A-Fa-f]/.test(part)) {
+ part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+ }
+
+ return part;
+ }).join("");
+}
+
+function encodeUnreserved(str) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
+ return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+ });
+}
+
+function encodeValue(operator, value, key) {
+ value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+
+ if (key) {
+ return encodeUnreserved(key) + "=" + value;
+ } else {
+ return value;
+ }
+}
+
+function isDefined(value) {
+ return value !== undefined && value !== null;
+}
+
+function isKeyOperator(operator) {
+ return operator === ";" || operator === "&" || operator === "?";
+}
+
+function getValues(context, operator, key, modifier) {
+ var value = context[key],
+ result = [];
+
+ if (isDefined(value) && value !== "") {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ value = value.toString();
+
+ if (modifier && modifier !== "*") {
+ value = value.substring(0, parseInt(modifier, 10));
}
- }],
- get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", {
- mediaType: {
- previews: ["antiope"]
+
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ } else {
+ if (modifier === "*") {
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ result.push(encodeValue(operator, value[k], k));
+ }
+ });
+ }
+ } else {
+ const tmp = [];
+
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ tmp.push(encodeValue(operator, value));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ tmp.push(encodeUnreserved(k));
+ tmp.push(encodeValue(operator, value[k].toString()));
+ }
+ });
+ }
+
+ if (isKeyOperator(operator)) {
+ result.push(encodeUnreserved(key) + "=" + tmp.join(","));
+ } else if (tmp.length !== 0) {
+ result.push(tmp.join(","));
+ }
}
- }],
- getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", {
- mediaType: {
- previews: ["antiope"]
+ }
+ } else {
+ if (operator === ";") {
+ if (isDefined(value)) {
+ result.push(encodeUnreserved(key));
}
- }],
- listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", {
- mediaType: {
- previews: ["antiope"]
- }
- }],
- update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", {
- mediaType: {
- previews: ["antiope"]
- }
- }]
- },
- codeScanning: {
- getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
- renamedParameters: {
- alert_id: "alert_number"
- }
- }],
- listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
- listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
- updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
- uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
- },
- codesOfConduct: {
- getAllCodesOfConduct: ["GET /codes_of_conduct", {
- mediaType: {
- previews: ["scarlet-witch"]
+ } else if (value === "" && (operator === "&" || operator === "?")) {
+ result.push(encodeUnreserved(key) + "=");
+ } else if (value === "") {
+ result.push("");
+ }
+ }
+
+ return result;
+}
+
+function parseUrl(template) {
+ return {
+ expand: expand.bind(null, template)
+ };
+}
+
+function expand(template, context) {
+ var operators = ["+", "#", ".", "/", ";", "?", "&"];
+ return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
}
- }],
- getConductCode: ["GET /codes_of_conduct/{key}", {
- mediaType: {
- previews: ["scarlet-witch"]
+
+ expression.split(/,/g).forEach(function (variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+
+ if (operator && operator !== "+") {
+ var separator = ",";
+
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
+ }
+
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
}
- }],
- getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", {
- mediaType: {
- previews: ["scarlet-witch"]
+ } else {
+ return encodeReserved(literal);
+ }
+ });
+}
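The url-template block above implements just enough of RFC 6570 for GitHub's routes; these helpers are internal to the endpoint module, so the sketch only traces what `parseUrl().expand()` produces for two typical templates:

parseUrl("/repos/{owner}/{repo}/issues").expand({ owner: "octokit", repo: "request.js" });
// -> "/repos/octokit/request.js/issues"
parseUrl("/notifications{?since,participating}").expand({ since: "2020-01-01", participating: true });
// -> "/notifications?since=2020-01-01&participating=true"  (the "?" operator expands to query parameters)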
+
+function parse(options) {
+ // https://fetch.spec.whatwg.org/#methods
+ let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
+
+ let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
+ let headers = Object.assign({}, options.headers);
+ let body;
+ let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
+
+ const urlVariableNames = extractUrlVariableNames(url);
+ url = parseUrl(url).expand(parameters);
+
+ if (!/^http/.test(url)) {
+ url = options.baseUrl + url;
+ }
+
+ const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
+ const remainingParameters = omit(parameters, omittedParameters);
+ const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+
+ if (!isBinaryRequest) {
+ if (options.mediaType.format) {
+ // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
+ headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+ }
+
+ if (options.mediaType.previews.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
+ } // for GET/HEAD requests, set URL query parameters from remaining parameters
+ // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
+
+
+ if (["GET", "HEAD"].includes(method)) {
+ url = addQueryParameters(url, remainingParameters);
+ } else {
+ if ("data" in remainingParameters) {
+ body = remainingParameters.data;
+ } else {
+ if (Object.keys(remainingParameters).length) {
+ body = remainingParameters;
+ } else {
+ headers["content-length"] = 0;
}
- }]
- },
- emojis: {
- get: ["GET /emojis"]
- },
- gists: {
- checkIsStarred: ["GET /gists/{gist_id}/star"],
- create: ["POST /gists"],
- createComment: ["POST /gists/{gist_id}/comments"],
- delete: ["DELETE /gists/{gist_id}"],
- deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
- fork: ["POST /gists/{gist_id}/forks"],
- get: ["GET /gists/{gist_id}"],
- getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
- getRevision: ["GET /gists/{gist_id}/{sha}"],
- list: ["GET /gists"],
- listComments: ["GET /gists/{gist_id}/comments"],
- listCommits: ["GET /gists/{gist_id}/commits"],
- listForUser: ["GET /users/{username}/gists"],
- listForks: ["GET /gists/{gist_id}/forks"],
- listPublic: ["GET /gists/public"],
- listStarred: ["GET /gists/starred"],
- star: ["PUT /gists/{gist_id}/star"],
- unstar: ["DELETE /gists/{gist_id}/star"],
- update: ["PATCH /gists/{gist_id}"],
- updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
- },
- git: {
- createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
- createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
- createRef: ["POST /repos/{owner}/{repo}/git/refs"],
- createTag: ["POST /repos/{owner}/{repo}/git/tags"],
- createTree: ["POST /repos/{owner}/{repo}/git/trees"],
- deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
- getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
- getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
- getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
- getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
- getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
- listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
- updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
- },
- gitignore: {
- getAllTemplates: ["GET /gitignore/templates"],
- getTemplate: ["GET /gitignore/templates/{name}"]
+ }
+ } // default content-type for JSON if body is set
+
+
+ if (!headers["content-type"] && typeof body !== "undefined") {
+ headers["content-type"] = "application/json; charset=utf-8";
+ } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
+ // fetch does not allow to set `content-length` header, but we can set body to an empty string
+
+
+ if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+ body = "";
+ } // Only return body/request keys if present
+
+
+ return Object.assign({
+ method,
+ url,
+ headers
+ }, typeof body !== "undefined" ? {
+ body
+ } : null, options.request ? {
+ request: options.request
+ } : null);
+}
+
+function endpointWithDefaults(defaults, route, options) {
+ return parse(merge(defaults, route, options));
+}
+
+function withDefaults(oldDefaults, newDefaults) {
+ const DEFAULTS = merge(oldDefaults, newDefaults);
+ const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
+ return Object.assign(endpoint, {
+ DEFAULTS,
+ defaults: withDefaults.bind(null, DEFAULTS),
+ merge: merge.bind(null, DEFAULTS),
+ parse
+ });
+}
+
+const VERSION = "6.0.9";
+
+const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
+// So we use RequestParameters and add method as additional required property.
+
+const DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
},
- interactions: {
- getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", {
- mediaType: {
- previews: ["sombra"]
- }
- }],
- getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", {
- mediaType: {
- previews: ["sombra"]
- }
- }],
- removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", {
- mediaType: {
- previews: ["sombra"]
- }
- }],
- removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", {
- mediaType: {
- previews: ["sombra"]
- }
- }],
- setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", {
- mediaType: {
- previews: ["sombra"]
- }
- }],
- setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", {
- mediaType: {
- previews: ["sombra"]
- }
- }]
- },
- issues: {
- addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
- addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
- create: ["POST /repos/{owner}/{repo}/issues"],
- createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
- createLabel: ["POST /repos/{owner}/{repo}/labels"],
- createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
- deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
- deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
- deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
- get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
- getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
- getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
- getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
- getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
- list: ["GET /issues"],
- listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
- listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
- listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
- listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
- listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
- listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", {
- mediaType: {
- previews: ["mockingbird"]
- }
- }],
- listForAuthenticatedUser: ["GET /user/issues"],
- listForOrg: ["GET /orgs/{org}/issues"],
- listForRepo: ["GET /repos/{owner}/{repo}/issues"],
- listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
- listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
- listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
- lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
- removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
- removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
- setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
- update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
- updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
- updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
- updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
- },
- licenses: {
- get: ["GET /licenses/{license}"],
- getAllCommonlyUsed: ["GET /licenses"],
- getForRepo: ["GET /repos/{owner}/{repo}/license"]
- },
- markdown: {
- render: ["POST /markdown"],
- renderRaw: ["POST /markdown/raw", {
- headers: {
- "content-type": "text/plain; charset=utf-8"
+ mediaType: {
+ format: "",
+ previews: []
+ }
+};
+
+const endpoint = withDefaults(null, DEFAULTS);
+
+exports.endpoint = endpoint;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
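Put together, the exported `endpoint` above (published as @octokit/endpoint) merges DEFAULTS with a route and parameters and returns plain request options; for example:

const { endpoint } = require("@octokit/endpoint");

const options = endpoint("GET /repos/{owner}/{repo}/issues", {
  owner: "octokit",
  repo: "request.js",
  state: "open", // not a URL variable, so for GET it ends up in the query string
});
// options.method          -> "GET"
// options.url             -> "https://api.github.com/repos/octokit/request.js/issues?state=open"
// options.headers.accept  -> "application/vnd.github.v3+json"
// A PUT/PATCH route without data instead gets body = "" so that fetch sends `content-length: 0`.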
+
+/***/ 558:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+/*!
+ * is-plain-object
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObject(o) {
+ return Object.prototype.toString.call(o) === '[object Object]';
+}
+
+function isPlainObject(o) {
+ var ctor,prot;
+
+ if (isObject(o) === false) return false;
+
+ // If has modified constructor
+ ctor = o.constructor;
+ if (ctor === undefined) return true;
+
+ // If has modified prototype
+ prot = ctor.prototype;
+ if (isObject(prot) === false) return false;
+
+ // If constructor does not have an Object-specific method
+ if (prot.hasOwnProperty('isPrototypeOf') === false) {
+ return false;
+ }
+
+ // Most likely a plain Object
+ return true;
+}
+
+exports.isPlainObject = isPlainObject;
+
+
+/***/ }),
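`isPlainObject` above only accepts values whose prototype looks like a vanilla `Object`; a few results implied by that check, using the export defined above:

isPlainObject({ suite: "main" });       // true
isPlainObject(Object.create(null));     // true  (constructor is undefined)
isPlainObject([]);                      // false ("[object Array]")
isPlainObject(new (class Suite {})());  // false (Suite.prototype has no own isPrototypeOf)
isPlainObject(null);                    // false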
+
+/***/ 8467:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+var request = __webpack_require__(6234);
+var universalUserAgent = __webpack_require__(5030);
+
+const VERSION = "4.5.7";
+
+class GraphqlError extends Error {
+ constructor(request, response) {
+ const message = response.data.errors[0].message;
+ super(message);
+ Object.assign(this, response.data);
+ Object.assign(this, {
+ headers: response.headers
+ });
+ this.name = "GraphqlError";
+ this.request = request; // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ }
+
+}
+
+const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
+const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
+function graphql(request, query, options) {
+ if (typeof query === "string" && options && "query" in options) {
+ return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
+ }
+
+ const parsedOptions = typeof query === "string" ? Object.assign({
+ query
+ }, options) : query;
+ const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
+ if (NON_VARIABLE_OPTIONS.includes(key)) {
+ result[key] = parsedOptions[key];
+ return result;
+ }
+
+ if (!result.variables) {
+ result.variables = {};
+ }
+
+ result.variables[key] = parsedOptions[key];
+ return result;
+ }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
+ // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
+
+ const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
+
+ if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
+ requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
+ }
+
+ return request(requestOptions).then(response => {
+ if (response.data.errors) {
+ const headers = {};
+
+ for (const key of Object.keys(response.headers)) {
+ headers[key] = response.headers[key];
}
- }]
- },
- meta: {
- get: ["GET /meta"]
+
+ throw new GraphqlError(requestOptions, {
+ headers,
+ data: response.data
+ });
+ }
+
+ return response.data.data;
+ });
+}
+
+function withDefaults(request$1, newDefaults) {
+ const newRequest = request$1.defaults(newDefaults);
+
+ const newApi = (query, options) => {
+ return graphql(newRequest, query, options);
+ };
+
+ return Object.assign(newApi, {
+ defaults: withDefaults.bind(null, newRequest),
+ endpoint: request.request.endpoint
+ });
+}
+
+const graphql$1 = withDefaults(request.request, {
+ headers: {
+ "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
},
- migrations: {
- cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
- deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", {
- mediaType: {
- previews: ["wyandotte"]
- }
- }],
- deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", {
- mediaType: {
- previews: ["wyandotte"]
+ method: "POST",
+ url: "/graphql"
+});
+function withCustomRequest(customRequest) {
+ return withDefaults(customRequest, {
+ method: "POST",
+ url: "/graphql"
+ });
+}
+
+exports.graphql = graphql$1;
+exports.withCustomRequest = withCustomRequest;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 4193:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+const VERSION = "2.6.0";
+
+/**
+ * Some “list” responses that can be paginated have a different response structure
+ *
+ * They have a `total_count` key in the response (search also has `incomplete_results`,
+ * /installation/repositories also has `repository_selection`), as well as a key with
+ * the list of the items, whose name varies from endpoint to endpoint.
+ *
+ * Octokit normalizes these responses so that paginated results are always returned following
+ * the same structure. One challenge is that if the list response has only one page, no Link
+ * header is provided, so this header alone is not sufficient to check whether a response is
+ * paginated or not.
+ *
+ * We check if a "total_count" key is present in the response data, but also make sure that
+ * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
+ * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ */
+function normalizePaginatedListResponse(response) {
+ const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+ if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
+ // to retrieve the same information.
+
+ const incompleteResults = response.data.incomplete_results;
+ const repositorySelection = response.data.repository_selection;
+ const totalCount = response.data.total_count;
+ delete response.data.incomplete_results;
+ delete response.data.repository_selection;
+ delete response.data.total_count;
+ const namespaceKey = Object.keys(response.data)[0];
+ const data = response.data[namespaceKey];
+ response.data = data;
+
+ if (typeof incompleteResults !== "undefined") {
+ response.data.incomplete_results = incompleteResults;
+ }
+
+ if (typeof repositorySelection !== "undefined") {
+ response.data.repository_selection = repositorySelection;
+ }
+
+ response.data.total_count = totalCount;
+ return response;
+}
+
+function iterator(octokit, route, parameters) {
+ const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
+ const requestMethod = typeof route === "function" ? route : octokit.request;
+ const method = options.method;
+ const headers = options.headers;
+ let url = options.url;
+ return {
+ [Symbol.asyncIterator]: () => ({
+ async next() {
+ if (!url) return {
+ done: true
+ };
+ const response = await requestMethod({
+ method,
+ url,
+ headers
+ });
+ const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
+        // '<url>; rel="next", <url>; rel="last"'
+ // sets `url` to undefined if "next" URL is not present or `link` header is not set
+
+ url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
+ return {
+ value: normalizedResponse
+ };
}
- }],
- downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", {
+
+ })
+ };
+}
+
+function paginate(octokit, route, parameters, mapFn) {
+ if (typeof parameters === "function") {
+ mapFn = parameters;
+ parameters = undefined;
+ }
+
+ return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
+}
+
+function gather(octokit, results, iterator, mapFn) {
+ return iterator.next().then(result => {
+ if (result.done) {
+ return results;
+ }
+
+ let earlyExit = false;
+
+ function done() {
+ earlyExit = true;
+ }
+
+ results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
+
+ if (earlyExit) {
+ return results;
+ }
+
+ return gather(octokit, results, iterator, mapFn);
+ });
+}
+
+const composePaginateRest = Object.assign(paginate, {
+ iterator
+});
+
+/**
+ * @param octokit Octokit instance
+ * @param options Options passed to Octokit constructor
+ */
+
+function paginateRest(octokit) {
+ return {
+ paginate: Object.assign(paginate.bind(null, octokit), {
+ iterator: iterator.bind(null, octokit)
+ })
+ };
+}
+paginateRest.VERSION = VERSION;
+
+exports.composePaginateRest = composePaginateRest;
+exports.paginateRest = paginateRest;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 3044:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+const Endpoints = {
+ actions: {
+ addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
+ cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
+ createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
+ createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+ createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
+ createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
+ createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
+ createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
+ createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
+ deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
+ deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
+ deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+ deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
+ deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
+ deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
+ deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
+ downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
+ downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
+ downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
+ getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
+ getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
+ getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
+ getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
+ getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+ getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
+ getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
+ getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
+ getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
+ getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
+ getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
+ listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
+ listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
+ listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
+ listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
+ listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
+ listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
+ listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
+ listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
+ listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
+ listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
+ listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
+ listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
+ reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
+ removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
+ setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"]
+ },
+ activity: {
+ checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
+ deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
+ deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
+ getFeeds: ["GET /feeds"],
+ getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
+ getThread: ["GET /notifications/threads/{thread_id}"],
+ getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
+ listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
+ listNotificationsForAuthenticatedUser: ["GET /notifications"],
+ listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
+ listPublicEvents: ["GET /events"],
+ listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
+ listPublicEventsForUser: ["GET /users/{username}/events/public"],
+ listPublicOrgEvents: ["GET /orgs/{org}/events"],
+ listReceivedEventsForUser: ["GET /users/{username}/received_events"],
+ listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
+ listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
+ listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
+ listReposStarredByAuthenticatedUser: ["GET /user/starred"],
+ listReposStarredByUser: ["GET /users/{username}/starred"],
+ listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
+ listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
+ listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
+ listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
+ markNotificationsAsRead: ["PUT /notifications"],
+ markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
+ markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
+ setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
+ setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
+ starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
+ unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
+ },
+ apps: {
+ addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
+ checkToken: ["POST /applications/{client_id}/token"],
+ createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["corsair"]
}
}],
- getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", {
+ createFromManifest: ["POST /app-manifests/{code}/conversions"],
+ createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
+ deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
+ deleteInstallation: ["DELETE /app/installations/{installation_id}"],
+ deleteToken: ["DELETE /applications/{client_id}/token"],
+ getAuthenticated: ["GET /app"],
+ getBySlug: ["GET /apps/{app_slug}"],
+ getInstallation: ["GET /app/installations/{installation_id}"],
+ getOrgInstallation: ["GET /orgs/{org}/installation"],
+ getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
+ getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
+ getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
+ getUserInstallation: ["GET /users/{username}/installation"],
+ listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
+ listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
+ listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
+ listInstallations: ["GET /app/installations"],
+ listInstallationsForAuthenticatedUser: ["GET /user/installations"],
+ listPlans: ["GET /marketplace_listing/plans"],
+ listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
+ listReposAccessibleToInstallation: ["GET /installation/repositories"],
+ listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
+ listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
+ removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
+ resetToken: ["PATCH /applications/{client_id}/token"],
+ revokeInstallationAccessToken: ["DELETE /installation/token"],
+ suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
+ unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"]
+ },
+ billing: {
+ getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
+ getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
+ getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
+ getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
+ getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
+ getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
+ },
+ checks: {
+ create: ["POST /repos/{owner}/{repo}/check-runs", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
- getImportStatus: ["GET /repos/{owner}/{repo}/import"],
- getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
- getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", {
+ createSuite: ["POST /repos/{owner}/{repo}/check-suites", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", {
+ get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- listForAuthenticatedUser: ["GET /user/migrations", {
+ getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- listForOrg: ["GET /orgs/{org}/migrations", {
+ listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", {
+ listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {
+ listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
- setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
- startForAuthenticatedUser: ["POST /user/migrations"],
- startForOrg: ["POST /orgs/{org}/migrations"],
- startImport: ["PUT /repos/{owner}/{repo}/import"],
- unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", {
+ listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", {
+ rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", {
mediaType: {
- previews: ["wyandotte"]
+ previews: ["antiope"]
}
}],
- updateImport: ["PATCH /repos/{owner}/{repo}/import"]
- },
- orgs: {
- blockUser: ["PUT /orgs/{org}/blocks/{username}"],
- checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
- checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
- checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
- convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
- createInvitation: ["POST /orgs/{org}/invitations"],
- createWebhook: ["POST /orgs/{org}/hooks"],
- deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
- get: ["GET /orgs/{org}"],
- getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
- getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
- getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
- list: ["GET /organizations"],
- listAppInstallations: ["GET /orgs/{org}/installations"],
- listBlockedUsers: ["GET /orgs/{org}/blocks"],
- listForAuthenticatedUser: ["GET /user/orgs"],
- listForUser: ["GET /users/{username}/orgs"],
- listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
- listMembers: ["GET /orgs/{org}/members"],
- listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
- listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
- listPendingInvitations: ["GET /orgs/{org}/invitations"],
- listPublicMembers: ["GET /orgs/{org}/public_members"],
- listWebhooks: ["GET /orgs/{org}/hooks"],
- pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
- removeMember: ["DELETE /orgs/{org}/members/{username}"],
- removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
- removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
- removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
- setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
- setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
- unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
- update: ["PATCH /orgs/{org}"],
- updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
- updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"]
- },
- projects: {
- addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", {
+ setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", {
mediaType: {
- previews: ["inertia"]
+ previews: ["antiope"]
}
}],
- createCard: ["POST /projects/columns/{column_id}/cards", {
+ update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", {
mediaType: {
- previews: ["inertia"]
+ previews: ["antiope"]
+ }
+ }]
+ },
+ codeScanning: {
+ getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
+ renamedParameters: {
+ alert_id: "alert_number"
}
}],
- createColumn: ["POST /projects/{project_id}/columns", {
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
+ listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
+ updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
+ uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
+ },
+ codesOfConduct: {
+ getAllCodesOfConduct: ["GET /codes_of_conduct", {
mediaType: {
- previews: ["inertia"]
+ previews: ["scarlet-witch"]
}
}],
- createForAuthenticatedUser: ["POST /user/projects", {
+ getConductCode: ["GET /codes_of_conduct/{key}", {
mediaType: {
- previews: ["inertia"]
+ previews: ["scarlet-witch"]
}
}],
- createForOrg: ["POST /orgs/{org}/projects", {
+ getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", {
mediaType: {
- previews: ["inertia"]
+ previews: ["scarlet-witch"]
}
- }],
- createForRepo: ["POST /repos/{owner}/{repo}/projects", {
+ }]
+ },
+ emojis: {
+ get: ["GET /emojis"]
+ },
+ gists: {
+ checkIsStarred: ["GET /gists/{gist_id}/star"],
+ create: ["POST /gists"],
+ createComment: ["POST /gists/{gist_id}/comments"],
+ delete: ["DELETE /gists/{gist_id}"],
+ deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
+ fork: ["POST /gists/{gist_id}/forks"],
+ get: ["GET /gists/{gist_id}"],
+ getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
+ getRevision: ["GET /gists/{gist_id}/{sha}"],
+ list: ["GET /gists"],
+ listComments: ["GET /gists/{gist_id}/comments"],
+ listCommits: ["GET /gists/{gist_id}/commits"],
+ listForUser: ["GET /users/{username}/gists"],
+ listForks: ["GET /gists/{gist_id}/forks"],
+ listPublic: ["GET /gists/public"],
+ listStarred: ["GET /gists/starred"],
+ star: ["PUT /gists/{gist_id}/star"],
+ unstar: ["DELETE /gists/{gist_id}/star"],
+ update: ["PATCH /gists/{gist_id}"],
+ updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
+ },
+ git: {
+ createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
+ createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
+ createRef: ["POST /repos/{owner}/{repo}/git/refs"],
+ createTag: ["POST /repos/{owner}/{repo}/git/tags"],
+ createTree: ["POST /repos/{owner}/{repo}/git/trees"],
+ deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
+ getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
+ getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
+ getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
+ getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
+ getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
+ listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
+ updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
+ },
+ gitignore: {
+ getAllTemplates: ["GET /gitignore/templates"],
+ getTemplate: ["GET /gitignore/templates/{name}"]
+ },
+ interactions: {
+ getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", {
mediaType: {
- previews: ["inertia"]
+ previews: ["sombra"]
}
}],
- delete: ["DELETE /projects/{project_id}", {
+ getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", {
mediaType: {
- previews: ["inertia"]
+ previews: ["sombra"]
}
}],
- deleteCard: ["DELETE /projects/columns/cards/{card_id}", {
+ removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", {
mediaType: {
- previews: ["inertia"]
+ previews: ["sombra"]
}
}],
- deleteColumn: ["DELETE /projects/columns/{column_id}", {
+ removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", {
mediaType: {
- previews: ["inertia"]
+ previews: ["sombra"]
}
}],
- get: ["GET /projects/{project_id}", {
+ setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", {
mediaType: {
- previews: ["inertia"]
+ previews: ["sombra"]
}
}],
- getCard: ["GET /projects/columns/cards/{card_id}", {
+ setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", {
mediaType: {
- previews: ["inertia"]
+ previews: ["sombra"]
}
- }],
- getColumn: ["GET /projects/columns/{column_id}", {
+ }]
+ },
+ issues: {
+ addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
+ addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
+ create: ["POST /repos/{owner}/{repo}/issues"],
+ createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
+ createLabel: ["POST /repos/{owner}/{repo}/labels"],
+ createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
+ deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
+ deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
+ get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
+ getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
+ getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
+ getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
+ list: ["GET /issues"],
+ listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
+ listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
+ listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
+ listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
+ listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
+ listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", {
mediaType: {
- previews: ["inertia"]
+ previews: ["mockingbird"]
}
}],
- getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", {
+ listForAuthenticatedUser: ["GET /user/issues"],
+ listForOrg: ["GET /orgs/{org}/issues"],
+ listForRepo: ["GET /repos/{owner}/{repo}/issues"],
+ listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
+ listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
+ listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
+ lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+ removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
+ removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
+ setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+ update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
+ updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
+ updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
+ },
+ licenses: {
+ get: ["GET /licenses/{license}"],
+ getAllCommonlyUsed: ["GET /licenses"],
+ getForRepo: ["GET /repos/{owner}/{repo}/license"]
+ },
+ markdown: {
+ render: ["POST /markdown"],
+ renderRaw: ["POST /markdown/raw", {
+ headers: {
+ "content-type": "text/plain; charset=utf-8"
+ }
+ }]
+ },
+ meta: {
+ get: ["GET /meta"]
+ },
+ migrations: {
+ cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
+ deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
+ getImportStatus: ["GET /repos/{owner}/{repo}/import"],
+ getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
+ getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ listForAuthenticatedUser: ["GET /user/migrations", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ listForOrg: ["GET /orgs/{org}/migrations", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
+ setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
+ startForAuthenticatedUser: ["POST /user/migrations"],
+ startForOrg: ["POST /orgs/{org}/migrations"],
+ startImport: ["PUT /repos/{owner}/{repo}/import"],
+ unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", {
+ mediaType: {
+ previews: ["wyandotte"]
+ }
+ }],
+ updateImport: ["PATCH /repos/{owner}/{repo}/import"]
+ },
+ orgs: {
+ blockUser: ["PUT /orgs/{org}/blocks/{username}"],
+ checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
+ checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
+ checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
+ convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
+ createInvitation: ["POST /orgs/{org}/invitations"],
+ createWebhook: ["POST /orgs/{org}/hooks"],
+ deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
+ get: ["GET /orgs/{org}"],
+ getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
+ getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
+ getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
+ list: ["GET /organizations"],
+ listAppInstallations: ["GET /orgs/{org}/installations"],
+ listBlockedUsers: ["GET /orgs/{org}/blocks"],
+ listForAuthenticatedUser: ["GET /user/orgs"],
+ listForUser: ["GET /users/{username}/orgs"],
+ listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
+ listMembers: ["GET /orgs/{org}/members"],
+ listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
+ listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
+ listPendingInvitations: ["GET /orgs/{org}/invitations"],
+ listPublicMembers: ["GET /orgs/{org}/public_members"],
+ listWebhooks: ["GET /orgs/{org}/hooks"],
+ pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
+ removeMember: ["DELETE /orgs/{org}/members/{username}"],
+ removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
+ removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
+ removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
+ setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
+ setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
+ unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
+ update: ["PATCH /orgs/{org}"],
+ updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
+ updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"]
+ },
+ projects: {
+ addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ createCard: ["POST /projects/columns/{column_id}/cards", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ createColumn: ["POST /projects/{project_id}/columns", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ createForAuthenticatedUser: ["POST /user/projects", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ createForOrg: ["POST /orgs/{org}/projects", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ createForRepo: ["POST /repos/{owner}/{repo}/projects", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ delete: ["DELETE /projects/{project_id}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ deleteCard: ["DELETE /projects/columns/cards/{card_id}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ deleteColumn: ["DELETE /projects/columns/{column_id}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ get: ["GET /projects/{project_id}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ getCard: ["GET /projects/columns/cards/{card_id}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ getColumn: ["GET /projects/columns/{column_id}", {
+ mediaType: {
+ previews: ["inertia"]
+ }
+ }],
+ getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", {
mediaType: {
previews: ["inertia"]
}
@@ -5395,2281 +6263,8075 @@ const Endpoints = {
}
};
-const VERSION = "4.2.1";
+const VERSION = "4.2.1";
+
+function endpointsToMethods(octokit, endpointsMap) {
+ const newMethods = {};
+
+ for (const [scope, endpoints] of Object.entries(endpointsMap)) {
+ for (const [methodName, endpoint] of Object.entries(endpoints)) {
+ const [route, defaults, decorations] = endpoint;
+ const [method, url] = route.split(/ /);
+ const endpointDefaults = Object.assign({
+ method,
+ url
+ }, defaults);
+
+ if (!newMethods[scope]) {
+ newMethods[scope] = {};
+ }
+
+ const scopeMethods = newMethods[scope];
+
+ if (decorations) {
+ scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
+ continue;
+ }
+
+ scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
+ }
+ }
+
+ return newMethods;
+}
+
+function decorate(octokit, scope, methodName, defaults, decorations) {
+ const requestWithDefaults = octokit.request.defaults(defaults);
+ /* istanbul ignore next */
+
+ function withDecorations(...args) {
+ // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+ let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
+
+ if (decorations.mapToData) {
+ options = Object.assign({}, options, {
+ data: options[decorations.mapToData],
+ [decorations.mapToData]: undefined
+ });
+ return requestWithDefaults(options);
+ }
+
+ if (decorations.renamed) {
+ const [newScope, newMethodName] = decorations.renamed;
+ octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
+ }
+
+ if (decorations.deprecated) {
+ octokit.log.warn(decorations.deprecated);
+ }
+
+ if (decorations.renamedParameters) {
+ // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+ const options = requestWithDefaults.endpoint.merge(...args);
+
+ for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
+ if (name in options) {
+ octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
+
+ if (!(alias in options)) {
+ options[alias] = options[name];
+ }
+
+ delete options[name];
+ }
+ }
+
+ return requestWithDefaults(options);
+ } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+
+
+ return requestWithDefaults(...args);
+ }
+
+ return Object.assign(withDecorations, requestWithDefaults);
+}
+
+/**
+ * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
+ * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
+ * done, we will remove the registerEndpoints methods and return the methods
+ * directly as with the other plugins. At that point we will also remove the
+ * legacy workarounds and deprecations.
+ *
+ * See the plan at
+ * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
+ */
+
+function restEndpointMethods(octokit) {
+ return endpointsToMethods(octokit, Endpoints);
+}
+restEndpointMethods.VERSION = VERSION;
+
+exports.restEndpointMethods = restEndpointMethods;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 537:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var deprecation = __webpack_require__(8932);
+var once = _interopDefault(__webpack_require__(1223));
+
+const logOnce = once(deprecation => console.warn(deprecation));
+/**
+ * Error with extra properties to help with debugging
+ */
+
+class RequestError extends Error {
+ constructor(message, statusCode, options) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = "HttpError";
+ this.status = statusCode;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ return statusCode;
+ }
+
+ });
+ this.headers = options.headers || {}; // redact request credentials without mutating original request options
+
+ const requestCopy = Object.assign({}, options.request);
+
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ });
+ }
+
+ requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
+ // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
+ .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
+ // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
+ .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ }
+
+}
+
+exports.RequestError = RequestError;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 6234:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var endpoint = __webpack_require__(9440);
+var universalUserAgent = __webpack_require__(5030);
+var isPlainObject = __webpack_require__(9062);
+var nodeFetch = _interopDefault(__webpack_require__(467));
+var requestError = __webpack_require__(537);
+
+const VERSION = "5.4.10";
+
+function getBufferResponse(response) {
+ return response.arrayBuffer();
+}
+
+function fetchWrapper(requestOptions) {
+ if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ requestOptions.body = JSON.stringify(requestOptions.body);
+ }
+
+ let headers = {};
+ let status;
+ let url;
+ const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
+ return fetch(requestOptions.url, Object.assign({
+ method: requestOptions.method,
+ body: requestOptions.body,
+ headers: requestOptions.headers,
+ redirect: requestOptions.redirect
+ }, requestOptions.request)).then(response => {
+ url = response.url;
+ status = response.status;
+
+ for (const keyAndValue of response.headers) {
+ headers[keyAndValue[0]] = keyAndValue[1];
+ }
+
+ if (status === 204 || status === 205) {
+ return;
+ } // GitHub API returns 200 for HEAD requests
+
+
+ if (requestOptions.method === "HEAD") {
+ if (status < 400) {
+ return;
+ }
+
+ throw new requestError.RequestError(response.statusText, status, {
+ headers,
+ request: requestOptions
+ });
+ }
+
+ if (status === 304) {
+ throw new requestError.RequestError("Not modified", status, {
+ headers,
+ request: requestOptions
+ });
+ }
+
+ if (status >= 400) {
+ return response.text().then(message => {
+ const error = new requestError.RequestError(message, status, {
+ headers,
+ request: requestOptions
+ });
+
+ try {
+ let responseBody = JSON.parse(error.message);
+ Object.assign(error, responseBody);
+ let errors = responseBody.errors; // Assumption `errors` would always be in Array format
+
+ error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
+ } catch (e) {// ignore, see octokit/rest.js#684
+ }
+
+ throw error;
+ });
+ }
+
+ const contentType = response.headers.get("content-type");
+
+ if (/application\/json/.test(contentType)) {
+ return response.json();
+ }
+
+ if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+ return response.text();
+ }
+
+ return getBufferResponse(response);
+ }).then(data => {
+ return {
+ status,
+ url,
+ headers,
+ data
+ };
+ }).catch(error => {
+ if (error instanceof requestError.RequestError) {
+ throw error;
+ }
+
+ throw new requestError.RequestError(error.message, 500, {
+ headers,
+ request: requestOptions
+ });
+ });
+}
+
+function withDefaults(oldEndpoint, newDefaults) {
+ const endpoint = oldEndpoint.defaults(newDefaults);
+
+ const newApi = function (route, parameters) {
+ const endpointOptions = endpoint.merge(route, parameters);
+
+ if (!endpointOptions.request || !endpointOptions.request.hook) {
+ return fetchWrapper(endpoint.parse(endpointOptions));
+ }
+
+ const request = (route, parameters) => {
+ return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
+ };
+
+ Object.assign(request, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+ return endpointOptions.request.hook(request, endpointOptions);
+ };
+
+ return Object.assign(newApi, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+}
+
+const request = withDefaults(endpoint.endpoint, {
+ headers: {
+ "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ }
+});
+
+exports.request = request;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 9062:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+/*!
+ * is-plain-object
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObject(o) {
+ return Object.prototype.toString.call(o) === '[object Object]';
+}
+
+function isPlainObject(o) {
+ var ctor,prot;
+
+ if (isObject(o) === false) return false;
+
+ // If has modified constructor
+ ctor = o.constructor;
+ if (ctor === undefined) return true;
+
+ // If has modified prototype
+ prot = ctor.prototype;
+ if (isObject(prot) === false) return false;
+
+ // If constructor does not have an Object-specific method
+ if (prot.hasOwnProperty('isPrototypeOf') === false) {
+ return false;
+ }
+
+ // Most likely a plain Object
+ return true;
+}
+
+exports.isPlainObject = isPlainObject;
+
+
+/***/ }),
+
+/***/ 3682:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+var register = __webpack_require__(4670)
+var addHook = __webpack_require__(5549)
+var removeHook = __webpack_require__(6819)
+
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind
+var bindable = bind.bind(bind)
+
+function bindApi (hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
+ hook.api = { remove: removeHookRef }
+ hook.remove = removeHookRef
+
+ ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind]
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
+ })
+}
+
+function HookSingular () {
+ var singularHookName = 'h'
+ var singularHookState = {
+ registry: {}
+ }
+ var singularHook = register.bind(null, singularHookState, singularHookName)
+ bindApi(singularHook, singularHookState, singularHookName)
+ return singularHook
+}
+
+function HookCollection () {
+ var state = {
+ registry: {}
+ }
+
+ var hook = register.bind(null, state)
+ bindApi(hook, state)
+
+ return hook
+}
+
+var collectionHookDeprecationMessageDisplayed = false
+function Hook () {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
+ collectionHookDeprecationMessageDisplayed = true
+ }
+ return HookCollection()
+}
+
+Hook.Singular = HookSingular.bind()
+Hook.Collection = HookCollection.bind()
+
+module.exports = Hook
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook
+module.exports.Singular = Hook.Singular
+module.exports.Collection = Hook.Collection
+
+
+/***/ }),
+
+/***/ 5549:
+/***/ ((module) => {
+
+module.exports = addHook
+
+function addHook (state, kind, name, hook) {
+ var orig = hook
+ if (!state.registry[name]) {
+ state.registry[name] = []
+ }
+
+ if (kind === 'before') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options))
+ }
+ }
+
+ if (kind === 'after') {
+ hook = function (method, options) {
+ var result
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_
+ return orig(result, options)
+ })
+ .then(function () {
+ return result
+ })
+ }
+ }
+
+ if (kind === 'error') {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .catch(function (error) {
+ return orig(error, options)
+ })
+ }
+ }
+
+ state.registry[name].push({
+ hook: hook,
+ orig: orig
+ })
+}
+
+
+/***/ }),
+
+/***/ 4670:
+/***/ ((module) => {
+
+module.exports = register
+
+function register (state, name, method, options) {
+ if (typeof method !== 'function') {
+ throw new Error('method for before hook must be a function')
+ }
+
+ if (!options) {
+ options = {}
+ }
+
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options)
+ }, method)()
+ }
+
+ return Promise.resolve()
+ .then(function () {
+ if (!state.registry[name]) {
+ return method(options)
+ }
+
+ return (state.registry[name]).reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options)
+ }, method)()
+ })
+}
+
+
+/***/ }),
+
+/***/ 6819:
+/***/ ((module) => {
+
+module.exports = removeHook
+
+function removeHook (state, name, method) {
+ if (!state.registry[name]) {
+ return
+ }
+
+ var index = state.registry[name]
+ .map(function (registered) { return registered.orig })
+ .indexOf(method)
+
+ if (index === -1) {
+ return
+ }
+
+ state.registry[name].splice(index, 1)
+}
+
+
+/***/ }),
+
+/***/ 8932:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
+
+
+/***/ }),
+
+/***/ 5582:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const stringify = __webpack_require__(4810);
+const compile = __webpack_require__(7123);
+const expand = __webpack_require__(6944);
+const parse = __webpack_require__(9889);
+
+/**
+ * Expand the given pattern or create a regex-compatible string.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
+ * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {String}
+ * @api public
+ */
+
+const braces = (input, options = {}) => {
+ let output = [];
+
+ if (Array.isArray(input)) {
+ for (let pattern of input) {
+ let result = braces.create(pattern, options);
+ if (Array.isArray(result)) {
+ output.push(...result);
+ } else {
+ output.push(result);
+ }
+ }
+ } else {
+ output = [].concat(braces.create(input, options));
+ }
+
+ if (options && options.expand === true && options.nodupes === true) {
+ output = [...new Set(output)];
+ }
+ return output;
+};
+
+/**
+ * Parse the given `str` with the given `options`.
+ *
+ * ```js
+ * // braces.parse(pattern, [, options]);
+ * const ast = braces.parse('a/{b,c}/d');
+ * console.log(ast);
+ * ```
+ * @param {String} pattern Brace pattern to parse
+ * @param {Object} options
+ * @return {Object} Returns an AST
+ * @api public
+ */
+
+braces.parse = (input, options = {}) => parse(input, options);
+
+/**
+ * Creates a braces string from an AST, or an AST node.
+ *
+ * ```js
+ * const braces = require('braces');
+ * let ast = braces.parse('foo/{a,b}/bar');
+ * console.log(stringify(ast.nodes[2])); //=> '{a,b}'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.stringify = (input, options = {}) => {
+ if (typeof input === 'string') {
+ return stringify(braces.parse(input, options), options);
+ }
+ return stringify(input, options);
+};
+
+/**
+ * Compiles a brace pattern into a regex-compatible, optimized string.
+ * This method is called by the main [braces](#braces) function by default.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.compile('a/{b,c}/d'));
+ * //=> ['a/(b|c)/d']
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.compile = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+ return compile(input, options);
+};
+
+/**
+ * Expands a brace pattern into an array. This method is called by the
+ * main [braces](#braces) function when `options.expand` is true. Before
+ * using this method it's recommended that you read the [performance notes](#performance)
+ * and advantages of using [.compile](#compile) instead.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.expand('a/{b,c}/d'));
+ * //=> ['a/b/d', 'a/c/d'];
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.expand = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+
+ let result = expand(input, options);
+
+ // filter out empty strings if specified
+ if (options.noempty === true) {
+ result = result.filter(Boolean);
+ }
+
+ // filter out duplicates if specified
+ if (options.nodupes === true) {
+ result = [...new Set(result)];
+ }
+
+ return result;
+};
+
+/**
+ * Processes a brace pattern and returns either an expanded array
+ * (if `options.expand` is true), a highly optimized regex-compatible string.
+ * This method is called by the main [braces](#braces) function.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
+ * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.create = (input, options = {}) => {
+ if (input === '' || input.length < 3) {
+ return [input];
+ }
+
+ return options.expand !== true
+ ? braces.compile(input, options)
+ : braces.expand(input, options);
+};
+
+/**
+ * Expose "braces"
+ */
+
+module.exports = braces;
+
+
+/***/ }),
+
+/***/ 7123:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const fill = __webpack_require__(791);
+const utils = __webpack_require__(7691);
+
+const compile = (ast, options = {}) => {
+ let walk = (node, parent = {}) => {
+ let invalidBlock = utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let invalid = invalidBlock === true || invalidNode === true;
+ let prefix = options.escapeInvalid === true ? '\\' : '';
+ let output = '';
+
+ if (node.isOpen === true) {
+ return prefix + node.value;
+ }
+ if (node.isClose === true) {
+ return prefix + node.value;
+ }
+
+ if (node.type === 'open') {
+ return invalid ? (prefix + node.value) : '(';
+ }
+
+ if (node.type === 'close') {
+ return invalid ? (prefix + node.value) : ')';
+ }
+
+ if (node.type === 'comma') {
+ return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
+ }
+
+ if (node.value) {
+ return node.value;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+ let range = fill(...args, { ...options, wrap: false, toRegex: true });
+
+ if (range.length !== 0) {
+ return args.length > 1 && range.length > 1 ? `(${range})` : range;
+ }
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += walk(child, node);
+ }
+ }
+ return output;
+ };
+
+ return walk(ast);
+};
+
+module.exports = compile;
+
+
+/***/ }),
+
+/***/ 5412:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+
+ // Digits
+ CHAR_0: '0', /* 0 */
+ CHAR_9: '9', /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 'A', /* A */
+ CHAR_LOWERCASE_A: 'a', /* a */
+ CHAR_UPPERCASE_Z: 'Z', /* Z */
+ CHAR_LOWERCASE_Z: 'z', /* z */
+
+ CHAR_LEFT_PARENTHESES: '(', /* ( */
+ CHAR_RIGHT_PARENTHESES: ')', /* ) */
+
+ CHAR_ASTERISK: '*', /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: '&', /* & */
+ CHAR_AT: '@', /* @ */
+ CHAR_BACKSLASH: '\\', /* \ */
+ CHAR_BACKTICK: '`', /* ` */
+ CHAR_CARRIAGE_RETURN: '\r', /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
+ CHAR_COLON: ':', /* : */
+ CHAR_COMMA: ',', /* , */
+  CHAR_DOLLAR: '$', /* $ */
+ CHAR_DOT: '.', /* . */
+ CHAR_DOUBLE_QUOTE: '"', /* " */
+ CHAR_EQUAL: '=', /* = */
+ CHAR_EXCLAMATION_MARK: '!', /* ! */
+ CHAR_FORM_FEED: '\f', /* \f */
+ CHAR_FORWARD_SLASH: '/', /* / */
+ CHAR_HASH: '#', /* # */
+ CHAR_HYPHEN_MINUS: '-', /* - */
+ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
+ CHAR_LEFT_CURLY_BRACE: '{', /* { */
+ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
+ CHAR_LINE_FEED: '\n', /* \n */
+ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
+ CHAR_PERCENT: '%', /* % */
+ CHAR_PLUS: '+', /* + */
+ CHAR_QUESTION_MARK: '?', /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
+ CHAR_RIGHT_CURLY_BRACE: '}', /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
+ CHAR_SEMICOLON: ';', /* ; */
+ CHAR_SINGLE_QUOTE: '\'', /* ' */
+ CHAR_SPACE: ' ', /* */
+ CHAR_TAB: '\t', /* \t */
+ CHAR_UNDERSCORE: '_', /* _ */
+ CHAR_VERTICAL_LINE: '|', /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
+};
+
+
+/***/ }),
+
+/***/ 6944:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const fill = __webpack_require__(791);
+const stringify = __webpack_require__(4810);
+const utils = __webpack_require__(7691);
+
+const append = (queue = '', stash = '', enclose = false) => {
+ let result = [];
+
+ queue = [].concat(queue);
+ stash = [].concat(stash);
+
+ if (!stash.length) return queue;
+ if (!queue.length) {
+ return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
+ }
+
+ for (let item of queue) {
+ if (Array.isArray(item)) {
+ for (let value of item) {
+ result.push(append(value, stash, enclose));
+ }
+ } else {
+ for (let ele of stash) {
+ if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
+ result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
+ }
+ }
+ }
+ return utils.flatten(result);
+};
+
+const expand = (ast, options = {}) => {
+ let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
+
+ let walk = (node, parent = {}) => {
+ node.queue = [];
+
+ let p = parent;
+ let q = parent.queue;
+
+ while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
+ p = p.parent;
+ q = p.queue;
+ }
+
+ if (node.invalid || node.dollar) {
+ q.push(append(q.pop(), stringify(node, options)));
+ return;
+ }
+
+ if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
+ q.push(append(q.pop(), ['{}']));
+ return;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+
+ if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
+ throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
+ }
+
+ let range = fill(...args, options);
+ if (range.length === 0) {
+ range = stringify(node, options);
+ }
+
+ q.push(append(q.pop(), range));
+ node.nodes = [];
+ return;
+ }
+
+ let enclose = utils.encloseBrace(node);
+ let queue = node.queue;
+ let block = node;
+
+ while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
+ block = block.parent;
+ queue = block.queue;
+ }
+
+ for (let i = 0; i < node.nodes.length; i++) {
+ let child = node.nodes[i];
+
+ if (child.type === 'comma' && node.type === 'brace') {
+ if (i === 1) queue.push('');
+ queue.push('');
+ continue;
+ }
+
+ if (child.type === 'close') {
+ q.push(append(q.pop(), queue, enclose));
+ continue;
+ }
+
+ if (child.value && child.type !== 'open') {
+ queue.push(append(queue.pop(), child.value));
+ continue;
+ }
+
+ if (child.nodes) {
+ walk(child, node);
+ }
+ }
+
+ return queue;
+ };
+
+ return utils.flatten(walk(ast));
+};
+
+module.exports = expand;
+
+
+/***/ }),
+
+/***/ 9889:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const stringify = __webpack_require__(4810);
+
+/**
+ * Constants
+ */
+
+const {
+ MAX_LENGTH,
+ CHAR_BACKSLASH, /* \ */
+ CHAR_BACKTICK, /* ` */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_RIGHT_SQUARE_BRACKET, /* ] */
+ CHAR_DOUBLE_QUOTE, /* " */
+ CHAR_SINGLE_QUOTE, /* ' */
+ CHAR_NO_BREAK_SPACE,
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE
+} = __webpack_require__(5412);
+
+/**
+ * parse
+ */
+
+const parse = (input, options = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ let opts = options || {};
+ let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ if (input.length > max) {
+ throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
+ }
+
+ let ast = { type: 'root', input, nodes: [] };
+ let stack = [ast];
+ let block = ast;
+ let prev = ast;
+ let brackets = 0;
+ let length = input.length;
+ let index = 0;
+ let depth = 0;
+ let value;
+ let memo = {};
+
+ /**
+ * Helpers
+ */
+
+ const advance = () => input[index++];
+ const push = node => {
+ if (node.type === 'text' && prev.type === 'dot') {
+ prev.type = 'text';
+ }
+
+ if (prev && prev.type === 'text' && node.type === 'text') {
+ prev.value += node.value;
+ return;
+ }
+
+ block.nodes.push(node);
+ node.parent = block;
+ node.prev = prev;
+ prev = node;
+ return node;
+ };
+
+ push({ type: 'bos' });
+
+ while (index < length) {
+ block = stack[stack.length - 1];
+ value = advance();
+
+ /**
+ * Invalid chars
+ */
+
+ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
+ continue;
+ }
+
+ /**
+ * Escaped chars
+ */
+
+ if (value === CHAR_BACKSLASH) {
+ push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
+ continue;
+ }
+
+ /**
+ * Right square bracket (literal): ']'
+ */
+
+ if (value === CHAR_RIGHT_SQUARE_BRACKET) {
+ push({ type: 'text', value: '\\' + value });
+ continue;
+ }
+
+ /**
+ * Left square bracket: '['
+ */
+
+ if (value === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+
+ let closed = true;
+ let next;
+
+ while (index < length && (next = advance())) {
+ value += next;
+
+ if (next === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+ continue;
+ }
+
+ if (next === CHAR_BACKSLASH) {
+ value += advance();
+ continue;
+ }
+
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ brackets--;
+
+ if (brackets === 0) {
+ break;
+ }
+ }
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Parentheses
+ */
+
+ if (value === CHAR_LEFT_PARENTHESES) {
+ block = push({ type: 'paren', nodes: [] });
+ stack.push(block);
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (value === CHAR_RIGHT_PARENTHESES) {
+ if (block.type !== 'paren') {
+ push({ type: 'text', value });
+ continue;
+ }
+ block = stack.pop();
+ push({ type: 'text', value });
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Quotes: '|"|`
+ */
+
+ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
+ let open = value;
+ let next;
+
+ if (options.keepQuotes !== true) {
+ value = '';
+ }
+
+ while (index < length && (next = advance())) {
+ if (next === CHAR_BACKSLASH) {
+ value += next + advance();
+ continue;
+ }
+
+ if (next === open) {
+ if (options.keepQuotes === true) value += next;
+ break;
+ }
+
+ value += next;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Left curly brace: '{'
+ */
+
+ if (value === CHAR_LEFT_CURLY_BRACE) {
+ depth++;
+
+ let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
+ let brace = {
+ type: 'brace',
+ open: true,
+ close: false,
+ dollar,
+ depth,
+ commas: 0,
+ ranges: 0,
+ nodes: []
+ };
+
+ block = push(brace);
+ stack.push(block);
+ push({ type: 'open', value });
+ continue;
+ }
+
+ /**
+ * Right curly brace: '}'
+ */
+
+ if (value === CHAR_RIGHT_CURLY_BRACE) {
+ if (block.type !== 'brace') {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ let type = 'close';
+ block = stack.pop();
+ block.close = true;
+
+ push({ type, value });
+ depth--;
+
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Comma: ','
+ */
+
+ if (value === CHAR_COMMA && depth > 0) {
+ if (block.ranges > 0) {
+ block.ranges = 0;
+ let open = block.nodes.shift();
+ block.nodes = [open, { type: 'text', value: stringify(block) }];
+ }
+
+ push({ type: 'comma', value });
+ block.commas++;
+ continue;
+ }
+
+ /**
+ * Dot: '.'
+ */
+
+ if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
+ let siblings = block.nodes;
+
+ if (depth === 0 || siblings.length === 0) {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (prev.type === 'dot') {
+ block.range = [];
+ prev.value += value;
+ prev.type = 'range';
+
+ if (block.nodes.length !== 3 && block.nodes.length !== 5) {
+ block.invalid = true;
+ block.ranges = 0;
+ prev.type = 'text';
+ continue;
+ }
+
+ block.ranges++;
+ block.args = [];
+ continue;
+ }
+
+ if (prev.type === 'range') {
+ siblings.pop();
+
+ let before = siblings[siblings.length - 1];
+ before.value += prev.value + value;
+ prev = before;
+ block.ranges--;
+ continue;
+ }
+
+ push({ type: 'dot', value });
+ continue;
+ }
+
+ /**
+ * Text
+ */
+
+ push({ type: 'text', value });
+ }
+
+ // Mark imbalanced braces and brackets as invalid
+ do {
+ block = stack.pop();
+
+ if (block.type !== 'root') {
+ block.nodes.forEach(node => {
+ if (!node.nodes) {
+ if (node.type === 'open') node.isOpen = true;
+ if (node.type === 'close') node.isClose = true;
+ if (!node.nodes) node.type = 'text';
+ node.invalid = true;
+ }
+ });
+
+ // get the location of the block on parent.nodes (block's siblings)
+ let parent = stack[stack.length - 1];
+ let index = parent.nodes.indexOf(block);
+ // replace the (invalid) block with its nodes
+ parent.nodes.splice(index, 1, ...block.nodes);
+ }
+ } while (stack.length > 0);
+
+ push({ type: 'eos' });
+ return ast;
+};
+
+module.exports = parse;
+
+
+/***/ }),
+
+/***/ 4810:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const utils = __webpack_require__(7691);
+
+module.exports = (ast, options = {}) => {
+ let stringify = (node, parent = {}) => {
+ let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let output = '';
+
+ if (node.value) {
+ if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
+ return '\\' + node.value;
+ }
+ return node.value;
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += stringify(child);
+ }
+ }
+ return output;
+ };
+
+ return stringify(ast);
+};
+
+
+
+/***/ }),
+
+/***/ 7691:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+exports.isInteger = num => {
+ if (typeof num === 'number') {
+ return Number.isInteger(num);
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isInteger(Number(num));
+ }
+ return false;
+};
+
+/**
+ * Find a node of the given type
+ */
+
+exports.find = (node, type) => node.nodes.find(node => node.type === type);
+
+/**
+ * Returns true if expanding the given range would exceed the limit
+ */
+
+exports.exceedsLimit = (min, max, step = 1, limit) => {
+ if (limit === false) return false;
+ if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
+ return ((Number(max) - Number(min)) / Number(step)) >= limit;
+};
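+
+// Illustrative only (not additional library behavior): with integer bounds,
+// exports.exceedsLimit(1, 100000, 1, 1000) evaluates (100000 - 1) / 1 >= 1000
+// and returns true, while non-integer bounds such as ('a', 'z') return false.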
+
+/**
+ * Escape the given node with '\\' before node.value
+ */
+
+exports.escapeNode = (block, n = 0, type) => {
+ let node = block.nodes[n];
+ if (!node) return;
+
+ if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
+ if (node.escaped !== true) {
+ node.value = '\\' + node.value;
+ node.escaped = true;
+ }
+ }
+};
+
+/**
+ * Returns true if the given brace node should be enclosed in literal braces
+ */
+
+exports.encloseBrace = node => {
+ if (node.type !== 'brace') return false;
+ if ((node.commas >> 0 + node.ranges >> 0) === 0) {
+ node.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a brace node is invalid.
+ */
+
+exports.isInvalidBrace = block => {
+ if (block.type !== 'brace') return false;
+ if (block.invalid === true || block.dollar) return true;
+ if ((block.commas >> 0 + block.ranges >> 0) === 0) {
+ block.invalid = true;
+ return true;
+ }
+ if (block.open !== true || block.close !== true) {
+ block.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a node is an open or close node
+ */
+
+exports.isOpenOrClose = node => {
+ if (node.type === 'open' || node.type === 'close') {
+ return true;
+ }
+ return node.open === true || node.close === true;
+};
+
+/**
+ * Reduce an array of text nodes.
+ */
+
+exports.reduce = nodes => nodes.reduce((acc, node) => {
+ if (node.type === 'text') acc.push(node.value);
+ if (node.type === 'range') node.type = 'text';
+ return acc;
+}, []);
+
+/**
+ * Flatten an array
+ */
+
+exports.flatten = (...args) => {
+ const result = [];
+ const flat = arr => {
+ for (let i = 0; i < arr.length; i++) {
+ let ele = arr[i];
+ Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
+ }
+ return result;
+ };
+ flat(args);
+ return result;
+};
+
+
+/***/ }),
+
+/***/ 791:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+/*!
+ * fill-range
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Licensed under the MIT License.
+ */
+
+
+
+const util = __webpack_require__(1669);
+const toRegexRange = __webpack_require__(6867);
+
+const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+
+const transform = toNumber => {
+ return value => toNumber === true ? Number(value) : String(value);
+};
+
+const isValidValue = value => {
+ return typeof value === 'number' || (typeof value === 'string' && value !== '');
+};
+
+const isNumber = num => Number.isInteger(+num);
+
+const zeros = input => {
+ let value = `${input}`;
+ let index = -1;
+ if (value[0] === '-') value = value.slice(1);
+ if (value === '0') return false;
+ while (value[++index] === '0');
+ return index > 0;
+};
+
+const stringify = (start, end, options) => {
+ if (typeof start === 'string' || typeof end === 'string') {
+ return true;
+ }
+ return options.stringify === true;
+};
+
+const pad = (input, maxLength, toNumber) => {
+ if (maxLength > 0) {
+ let dash = input[0] === '-' ? '-' : '';
+ if (dash) input = input.slice(1);
+ input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
+ }
+ if (toNumber === false) {
+ return String(input);
+ }
+ return input;
+};
+
+const toMaxLen = (input, maxLength) => {
+ let negative = input[0] === '-' ? '-' : '';
+ if (negative) {
+ input = input.slice(1);
+ maxLength--;
+ }
+ while (input.length < maxLength) input = '0' + input;
+ return negative ? ('-' + input) : input;
+};
+
+const toSequence = (parts, options) => {
+ parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+ parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+
+ let prefix = options.capture ? '' : '?:';
+ let positives = '';
+ let negatives = '';
+ let result;
+
+ if (parts.positives.length) {
+ positives = parts.positives.join('|');
+ }
+
+ if (parts.negatives.length) {
+ negatives = `-(${prefix}${parts.negatives.join('|')})`;
+ }
+
+ if (positives && negatives) {
+ result = `${positives}|${negatives}`;
+ } else {
+ result = positives || negatives;
+ }
+
+ if (options.wrap) {
+ return `(${prefix}${result})`;
+ }
+
+ return result;
+};
+
+const toRange = (a, b, isNumbers, options) => {
+ if (isNumbers) {
+ return toRegexRange(a, b, { wrap: false, ...options });
+ }
+
+ let start = String.fromCharCode(a);
+ if (a === b) return start;
+
+ let stop = String.fromCharCode(b);
+ return `[${start}-${stop}]`;
+};
+
+const toRegex = (start, end, options) => {
+ if (Array.isArray(start)) {
+ let wrap = options.wrap === true;
+ let prefix = options.capture ? '' : '?:';
+ return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
+ }
+ return toRegexRange(start, end, options);
+};
+
+const rangeError = (...args) => {
+ return new RangeError('Invalid range arguments: ' + util.inspect(...args));
+};
+
+const invalidRange = (start, end, options) => {
+ if (options.strictRanges === true) throw rangeError([start, end]);
+ return [];
+};
+
+const invalidStep = (step, options) => {
+ if (options.strictRanges === true) {
+ throw new TypeError(`Expected step "${step}" to be a number`);
+ }
+ return [];
+};
+
+const fillNumbers = (start, end, step = 1, options = {}) => {
+ let a = Number(start);
+ let b = Number(end);
+
+ if (!Number.isInteger(a) || !Number.isInteger(b)) {
+ if (options.strictRanges === true) throw rangeError([start, end]);
+ return [];
+ }
+
+ // fix negative zero
+ if (a === 0) a = 0;
+ if (b === 0) b = 0;
+
+ let descending = a > b;
+ let startString = String(start);
+ let endString = String(end);
+ let stepString = String(step);
+ step = Math.max(Math.abs(step), 1);
+
+ let padded = zeros(startString) || zeros(endString) || zeros(stepString);
+ let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
+ let toNumber = padded === false && stringify(start, end, options) === false;
+ let format = options.transform || transform(toNumber);
+
+ if (options.toRegex && step === 1) {
+ return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
+ }
+
+ let parts = { negatives: [], positives: [] };
+ let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
+ let range = [];
+ let index = 0;
+
+ while (descending ? a >= b : a <= b) {
+ if (options.toRegex === true && step > 1) {
+ push(a);
+ } else {
+ range.push(pad(format(a, index), maxLen, toNumber));
+ }
+ a = descending ? a - step : a + step;
+ index++;
+ }
+
+ if (options.toRegex === true) {
+ return step > 1
+ ? toSequence(parts, options)
+ : toRegex(range, null, { wrap: false, ...options });
+ }
+
+ return range;
+};
+
+const fillLetters = (start, end, step = 1, options = {}) => {
+ if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
+ return invalidRange(start, end, options);
+ }
+
+
+ let format = options.transform || (val => String.fromCharCode(val));
+ let a = `${start}`.charCodeAt(0);
+ let b = `${end}`.charCodeAt(0);
+
+ let descending = a > b;
+ let min = Math.min(a, b);
+ let max = Math.max(a, b);
+
+ if (options.toRegex && step === 1) {
+ return toRange(min, max, false, options);
+ }
+
+ let range = [];
+ let index = 0;
+
+ while (descending ? a >= b : a <= b) {
+ range.push(format(a, index));
+ a = descending ? a - step : a + step;
+ index++;
+ }
+
+ if (options.toRegex === true) {
+ return toRegex(range, null, { wrap: false, ...options });
+ }
+
+ return range;
+};
+
+const fill = (start, end, step, options = {}) => {
+ if (end == null && isValidValue(start)) {
+ return [start];
+ }
+
+ if (!isValidValue(start) || !isValidValue(end)) {
+ return invalidRange(start, end, options);
+ }
+
+ if (typeof step === 'function') {
+ return fill(start, end, 1, { transform: step });
+ }
+
+ if (isObject(step)) {
+ return fill(start, end, 0, step);
+ }
+
+ let opts = { ...options };
+ if (opts.capture === true) opts.wrap = true;
+ step = step || opts.step || 1;
+
+ if (!isNumber(step)) {
+ if (step != null && !isObject(step)) return invalidStep(step, opts);
+ return fill(start, end, 1, step);
+ }
+
+ if (isNumber(start) && isNumber(end)) {
+ return fillNumbers(start, end, step, opts);
+ }
+
+ return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
+};
+
+module.exports = fill;
+
+
+/***/ }),
+
+/***/ 2840:
+/***/ ((module) => {
+
+"use strict";
+/*!
+ * is-number
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+
+
+module.exports = function(num) {
+ if (typeof num === 'number') {
+ return num - num === 0;
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
+ }
+ return false;
+};
+
+
+/***/ }),
+
+/***/ 3913:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const util = __webpack_require__(1669);
+const braces = __webpack_require__(5582);
+const picomatch = __webpack_require__(8569);
+const utils = __webpack_require__(479);
+const isEmptyString = val => typeof val === 'string' && (val === '' || val === './');
+
+/**
+ * Returns an array of strings that match one or more glob patterns.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm(list, patterns[, options]);
+ *
+ * console.log(mm(['a.js', 'a.txt'], ['*.js']));
+ * //=> [ 'a.js' ]
+ * ```
+ * @param {String|Array} list List of strings to match.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} options See available [options](#options)
+ * @return {Array} Returns an array of matches
+ * @summary false
+ * @api public
+ */
+
+const micromatch = (list, patterns, options) => {
+ patterns = [].concat(patterns);
+ list = [].concat(list);
+
+ let omit = new Set();
+ let keep = new Set();
+ let items = new Set();
+ let negatives = 0;
+
+ let onResult = state => {
+ items.add(state.output);
+ if (options && options.onResult) {
+ options.onResult(state);
+ }
+ };
+
+ for (let i = 0; i < patterns.length; i++) {
+ let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);
+ let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
+ if (negated) negatives++;
+
+ for (let item of list) {
+ let matched = isMatch(item, true);
+
+ let match = negated ? !matched.isMatch : matched.isMatch;
+ if (!match) continue;
+
+ if (negated) {
+ omit.add(matched.output);
+ } else {
+ omit.delete(matched.output);
+ keep.add(matched.output);
+ }
+ }
+ }
+
+ let result = negatives === patterns.length ? [...items] : [...keep];
+ let matches = result.filter(item => !omit.has(item));
+
+ if (options && matches.length === 0) {
+ if (options.failglob === true) {
+ throw new Error(`No matches found for "${patterns.join(', ')}"`);
+ }
+
+ if (options.nonull === true || options.nullglob === true) {
+ return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
+ }
+ }
+
+ return matches;
+};
+
+/**
+ * Backwards compatibility
+ */
+
+micromatch.match = micromatch;
+
+/**
+ * Returns a matcher function from the given glob `pattern` and `options`.
+ * The returned function takes a string to match as its only argument and returns
+ * true if the string is a match.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.matcher(pattern[, options]);
+ *
+ * const isMatch = mm.matcher('*.!(*a)');
+ * console.log(isMatch('a.a')); //=> false
+ * console.log(isMatch('a.b')); //=> true
+ * ```
+ * @param {String} `pattern` Glob pattern
+ * @param {Object} `options`
+ * @return {Function} Returns a matcher function.
+ * @api public
+ */
+
+micromatch.matcher = (pattern, options) => picomatch(pattern, options);
+
+/**
+ * Returns true if **any** of the given glob `patterns` match the specified `string`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.isMatch(string, patterns[, options]);
+ *
+ * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
+ * console.log(mm.isMatch('a.a', 'b.*')); //=> false
+ * ```
+ * @param {String} str The string to test.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} [options] See available [options](#options).
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
+ */
+
+micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
+
+/**
+ * Backwards compatibility
+ */
+
+micromatch.any = micromatch.isMatch;
+
+/**
+ * Returns a list of strings that _**do not match any**_ of the given `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.not(list, patterns[, options]);
+ *
+ * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
+ * //=> ['b.b', 'c.c']
+ * ```
+ * @param {Array} `list` Array of strings to match.
+ * @param {String|Array} `patterns` One or more glob pattern to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Array} Returns an array of strings that **do not match** the given patterns.
+ * @api public
+ */
+
+micromatch.not = (list, patterns, options = {}) => {
+ patterns = [].concat(patterns).map(String);
+ let result = new Set();
+ let items = [];
+
+ let onResult = state => {
+ if (options.onResult) options.onResult(state);
+ items.push(state.output);
+ };
+
+ let matches = micromatch(list, patterns, { ...options, onResult });
+
+ for (let item of items) {
+ if (!matches.includes(item)) {
+ result.add(item);
+ }
+ }
+ return [...result];
+};
+
+/**
+ * Returns true if the given `string` contains the given pattern. Similar
+ * to [.isMatch](#isMatch) but the pattern can match any part of the string.
+ *
+ * ```js
+ * var mm = require('micromatch');
+ * // mm.contains(string, pattern[, options]);
+ *
+ * console.log(mm.contains('aa/bb/cc', '*b'));
+ * //=> true
+ * console.log(mm.contains('aa/bb/cc', '*d'));
+ * //=> false
+ * ```
+ * @param {String} `str` The string to match.
+ * @param {String|Array} `patterns` Glob pattern to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if the pattern matches any part of `str`.
+ * @api public
+ */
+
+micromatch.contains = (str, pattern, options) => {
+ if (typeof str !== 'string') {
+ throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
+ }
+
+ if (Array.isArray(pattern)) {
+ return pattern.some(p => micromatch.contains(str, p, options));
+ }
+
+ if (typeof pattern === 'string') {
+ if (isEmptyString(str) || isEmptyString(pattern)) {
+ return false;
+ }
+
+ if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
+ return true;
+ }
+ }
+
+ return micromatch.isMatch(str, pattern, { ...options, contains: true });
+};
+
+/**
+ * Filter the keys of the given object with the given `glob` pattern
+ * and `options`. Does not attempt to match nested keys. If you need this feature,
+ * use [glob-object][] instead.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.matchKeys(object, patterns[, options]);
+ *
+ * const obj = { aa: 'a', ab: 'b', ac: 'c' };
+ * console.log(mm.matchKeys(obj, '*b'));
+ * //=> { ab: 'b' }
+ * ```
+ * @param {Object} `object` The object with keys to filter.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Object} Returns an object with only keys that match the given patterns.
+ * @api public
+ */
+
+micromatch.matchKeys = (obj, patterns, options) => {
+ if (!utils.isObject(obj)) {
+ throw new TypeError('Expected the first argument to be an object');
+ }
+ let keys = micromatch(Object.keys(obj), patterns, options);
+ let res = {};
+ for (let key of keys) res[key] = obj[key];
+ return res;
+};
+
+/**
+ * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.some(list, patterns[, options]);
+ *
+ * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
+ * // true
+ * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
+ * // false
+ * ```
+ * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if any of the strings in `list` match any of the given patterns
+ * @api public
+ */
+
+micromatch.some = (list, patterns, options) => {
+ let items = [].concat(list);
+
+ for (let pattern of [].concat(patterns)) {
+ let isMatch = picomatch(String(pattern), options);
+ if (items.some(item => isMatch(item))) {
+ return true;
+ }
+ }
+ return false;
+};
+
+/**
+ * Returns true if every string in the given `list` matches
+ * any of the given glob `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.every(list, patterns[, options]);
+ *
+ * console.log(mm.every('foo.js', ['foo.js']));
+ * // true
+ * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
+ * // true
+ * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
+ * // false
+ * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
+ * // false
+ * ```
+ * @param {String|Array} `list` The string or array of strings to test.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if every string in `list` matches at least one of the given patterns
+ * @api public
+ */
+
+micromatch.every = (list, patterns, options) => {
+ let items = [].concat(list);
+
+ for (let pattern of [].concat(patterns)) {
+ let isMatch = picomatch(String(pattern), options);
+ if (!items.every(item => isMatch(item))) {
+ return false;
+ }
+ }
+ return true;
+};
+
+/**
+ * Returns true if **all** of the given `patterns` match
+ * the specified string.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.all(string, patterns[, options]);
+ *
+ * console.log(mm.all('foo.js', ['foo.js']));
+ * // true
+ *
+ * console.log(mm.all('foo.js', ['*.js', '!foo.js']));
+ * // false
+ *
+ * console.log(mm.all('foo.js', ['*.js', 'foo.js']));
+ * // true
+ *
+ * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
+ * // true
+ * ```
+ * @param {String|Array} `str` The string to test.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if all of the given patterns match `str`
+ * @api public
+ */
+
+micromatch.all = (str, patterns, options) => {
+ if (typeof str !== 'string') {
+ throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
+ }
+
+ return [].concat(patterns).every(p => picomatch(p, options)(str));
+};
+
+/**
+ * Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.capture(pattern, string[, options]);
+ *
+ * console.log(mm.capture('test/*.js', 'test/foo.js'));
+ * //=> ['foo']
+ * console.log(mm.capture('test/*.js', 'foo/bar.css'));
+ * //=> null
+ * ```
+ * @param {String} `glob` Glob pattern to use for matching.
+ * @param {String} `input` String to match
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`.
+ * @api public
+ */
+
+micromatch.capture = (glob, input, options) => {
+ let posix = utils.isWindows(options);
+ let regex = picomatch.makeRe(String(glob), { ...options, capture: true });
+ let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);
+
+ if (match) {
+ return match.slice(1).map(v => v === void 0 ? '' : v);
+ }
+};
+
+/**
+ * Create a regular expression from the given glob `pattern`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.makeRe(pattern[, options]);
+ *
+ * console.log(mm.makeRe('*.js'));
+ * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
+ * ```
+ * @param {String} `pattern` A glob pattern to convert to regex.
+ * @param {Object} `options`
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
+
+micromatch.makeRe = (...args) => picomatch.makeRe(...args);
+
+/**
+ * Scan a glob pattern to separate the pattern into segments. Used
+ * by the [split](#split) method.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * const state = mm.scan(pattern[, options]);
+ * ```
+ * @param {String} `pattern`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with the separated pattern segments.
+ * @api public
+ */
+
+micromatch.scan = (...args) => picomatch.scan(...args);
+
+/**
+ * Parse a glob pattern to create the source string for a regular
+ * expression.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * const state = mm.parse(pattern[, options]);
+ * ```
+ * @param {String} `glob`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties and output to be used as regex source string.
+ * @api public
+ */
+
+micromatch.parse = (patterns, options) => {
+ let res = [];
+ for (let pattern of [].concat(patterns || [])) {
+ for (let str of braces(String(pattern), options)) {
+ res.push(picomatch.parse(str, options));
+ }
+ }
+ return res;
+};
+
+/**
+ * Process the given brace `pattern`.
+ *
+ * ```js
+ * const { braces } = require('micromatch');
+ * console.log(braces('foo/{a,b,c}/bar'));
+ * //=> [ 'foo/(a|b|c)/bar' ]
+ *
+ * console.log(braces('foo/{a,b,c}/bar', { expand: true }));
+ * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
+ * ```
+ * @param {String} `pattern` String with brace pattern to process.
+ * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
+ * @return {Array}
+ * @api public
+ */
+
+micromatch.braces = (pattern, options) => {
+ if (typeof pattern !== 'string') throw new TypeError('Expected a string');
+ if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) {
+ return [pattern];
+ }
+ return braces(pattern, options);
+};
+
+/**
+ * Expand braces
+ */
+
+micromatch.braceExpand = (pattern, options) => {
+ if (typeof pattern !== 'string') throw new TypeError('Expected a string');
+ return micromatch.braces(pattern, { ...options, expand: true });
+};
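+
+/**
+ * Illustrative usage only (a sketch mirroring the `braces` example above, not
+ * additional library code):
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * console.log(mm.braceExpand('foo/{a,b,c}/bar'));
+ * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
+ * ```
+ */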
+
+/**
+ * Expose micromatch
+ */
+
+module.exports = micromatch;
+
+
+/***/ }),
+
+/***/ 6867:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+/*!
+ * to-regex-range
+ *
+ * Copyright (c) 2015-present, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+
+
+const isNumber = __webpack_require__(2840);
+
+const toRegexRange = (min, max, options) => {
+ if (isNumber(min) === false) {
+ throw new TypeError('toRegexRange: expected the first argument to be a number');
+ }
+
+ if (max === void 0 || min === max) {
+ return String(min);
+ }
+
+ if (isNumber(max) === false) {
+ throw new TypeError('toRegexRange: expected the second argument to be a number.');
+ }
+
+ let opts = { relaxZeros: true, ...options };
+ if (typeof opts.strictZeros === 'boolean') {
+ opts.relaxZeros = opts.strictZeros === false;
+ }
+
+ let relax = String(opts.relaxZeros);
+ let shorthand = String(opts.shorthand);
+ let capture = String(opts.capture);
+ let wrap = String(opts.wrap);
+ let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;
+
+ if (toRegexRange.cache.hasOwnProperty(cacheKey)) {
+ return toRegexRange.cache[cacheKey].result;
+ }
+
+ let a = Math.min(min, max);
+ let b = Math.max(min, max);
+
+ if (Math.abs(a - b) === 1) {
+ let result = min + '|' + max;
+ if (opts.capture) {
+ return `(${result})`;
+ }
+ if (opts.wrap === false) {
+ return result;
+ }
+ return `(?:${result})`;
+ }
+
+ let isPadded = hasPadding(min) || hasPadding(max);
+ let state = { min, max, a, b };
+ let positives = [];
+ let negatives = [];
+
+ if (isPadded) {
+ state.isPadded = isPadded;
+ state.maxLen = String(state.max).length;
+ }
+
+ if (a < 0) {
+ let newMin = b < 0 ? Math.abs(b) : 1;
+ negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
+ a = state.a = 0;
+ }
+
+ if (b >= 0) {
+ positives = splitToPatterns(a, b, state, opts);
+ }
+
+ state.negatives = negatives;
+ state.positives = positives;
+ state.result = collatePatterns(negatives, positives, opts);
+
+ if (opts.capture === true) {
+ state.result = `(${state.result})`;
+ } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
+ state.result = `(?:${state.result})`;
+ }
+
+ toRegexRange.cache[cacheKey] = state;
+ return state.result;
+};
+
+function collatePatterns(neg, pos, options) {
+ let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];
+ let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];
+ let intersected = filterPatterns(neg, pos, '-?', true, options) || [];
+ let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
+ return subpatterns.join('|');
+}
+
+function splitToRanges(min, max) {
+ let nines = 1;
+ let zeros = 1;
+
+ let stop = countNines(min, nines);
+ let stops = new Set([max]);
+
+ while (min <= stop && stop <= max) {
+ stops.add(stop);
+ nines += 1;
+ stop = countNines(min, nines);
+ }
+
+ stop = countZeros(max + 1, zeros) - 1;
+
+ while (min < stop && stop <= max) {
+ stops.add(stop);
+ zeros += 1;
+ stop = countZeros(max + 1, zeros) - 1;
+ }
+
+ stops = [...stops];
+ stops.sort(compare);
+ return stops;
+}
+
+/**
+ * Convert a range to a regex pattern
+ * @param {Number} `start`
+ * @param {Number} `stop`
+ * @return {String}
+ */
+
+function rangeToPattern(start, stop, options) {
+ if (start === stop) {
+ return { pattern: start, count: [], digits: 0 };
+ }
+
+ let zipped = zip(start, stop);
+ let digits = zipped.length;
+ let pattern = '';
+ let count = 0;
+
+ for (let i = 0; i < digits; i++) {
+ let [startDigit, stopDigit] = zipped[i];
+
+ if (startDigit === stopDigit) {
+ pattern += startDigit;
+
+ } else if (startDigit !== '0' || stopDigit !== '9') {
+ pattern += toCharacterClass(startDigit, stopDigit, options);
+
+ } else {
+ count++;
+ }
+ }
+
+ if (count) {
+ pattern += options.shorthand === true ? '\\d' : '[0-9]';
+ }
+
+ return { pattern, count: [count], digits };
+}
+
+function splitToPatterns(min, max, tok, options) {
+ let ranges = splitToRanges(min, max);
+ let tokens = [];
+ let start = min;
+ let prev;
+
+ for (let i = 0; i < ranges.length; i++) {
+ let max = ranges[i];
+ let obj = rangeToPattern(String(start), String(max), options);
+ let zeros = '';
+
+ if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
+ if (prev.count.length > 1) {
+ prev.count.pop();
+ }
+
+ prev.count.push(obj.count[0]);
+ prev.string = prev.pattern + toQuantifier(prev.count);
+ start = max + 1;
+ continue;
+ }
+
+ if (tok.isPadded) {
+ zeros = padZeros(max, tok, options);
+ }
+
+ obj.string = zeros + obj.pattern + toQuantifier(obj.count);
+ tokens.push(obj);
+ start = max + 1;
+ prev = obj;
+ }
+
+ return tokens;
+}
+
+function filterPatterns(arr, comparison, prefix, intersection, options) {
+ let result = [];
+
+ for (let ele of arr) {
+ let { string } = ele;
+
+ // only push if _both_ are negative...
+ if (!intersection && !contains(comparison, 'string', string)) {
+ result.push(prefix + string);
+ }
+
+ // or _both_ are positive
+ if (intersection && contains(comparison, 'string', string)) {
+ result.push(prefix + string);
+ }
+ }
+ return result;
+}
+
+/**
+ * Zip strings
+ */
+
+function zip(a, b) {
+ let arr = [];
+ for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
+ return arr;
+}
+
+function compare(a, b) {
+ return a > b ? 1 : b > a ? -1 : 0;
+}
+
+function contains(arr, key, val) {
+ return arr.some(ele => ele[key] === val);
+}
+
+function countNines(min, len) {
+ return Number(String(min).slice(0, -len) + '9'.repeat(len));
+}
+
+function countZeros(integer, zeros) {
+ return integer - (integer % Math.pow(10, zeros));
+}
+
+function toQuantifier(digits) {
+ let [start = 0, stop = ''] = digits;
+ if (stop || start > 1) {
+ return `{${start + (stop ? ',' + stop : '')}}`;
+ }
+ return '';
+}
+
+function toCharacterClass(a, b, options) {
+ return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;
+}
+
+function hasPadding(str) {
+ return /^-?(0+)\d/.test(str);
+}
+
+function padZeros(value, tok, options) {
+ if (!tok.isPadded) {
+ return value;
+ }
+
+ let diff = Math.abs(tok.maxLen - String(value).length);
+ let relax = options.relaxZeros !== false;
+
+ switch (diff) {
+ case 0:
+ return '';
+ case 1:
+ return relax ? '0?' : '0';
+ case 2:
+ return relax ? '0{0,2}' : '00';
+ default: {
+ return relax ? `0{0,${diff}}` : `0{${diff}}`;
+ }
+ }
+}
+
+/**
+ * Cache
+ */
+
+toRegexRange.cache = {};
+toRegexRange.clearCache = () => (toRegexRange.cache = {});
+
+/**
+ * Expose `toRegexRange`
+ */
+
+module.exports = toRegexRange;
+
+
+/***/ }),
+
+/***/ 3664:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+const taskManager = __webpack_require__(2708);
+const async_1 = __webpack_require__(5679);
+const stream_1 = __webpack_require__(4630);
+const sync_1 = __webpack_require__(2405);
+const settings_1 = __webpack_require__(952);
+const utils = __webpack_require__(5444);
+async function FastGlob(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, async_1.default, options);
+ const result = await Promise.all(works);
+ return utils.array.flatten(result);
+}
+// https://github.com/typescript-eslint/typescript-eslint/issues/60
+// eslint-disable-next-line no-redeclare
+(function (FastGlob) {
+ function sync(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, sync_1.default, options);
+ return utils.array.flatten(works);
+ }
+ FastGlob.sync = sync;
+ function stream(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, stream_1.default, options);
+ /**
+ * The stream returned by the provider cannot work with an asynchronous iterator.
+ * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
+ * This affects performance (+25%). I don't see a better solution right now.
+ */
+ return utils.stream.merge(works);
+ }
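+ // Illustrative usage only (a sketch, not part of the bundle): because the
+ // merged stream is a regular Node.js readable stream, it can be consumed with
+ // `for await (const entry of FastGlob.stream('src/**'))`.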
+ FastGlob.stream = stream;
+ function generateTasks(source, options) {
+ assertPatternsInput(source);
+ const patterns = [].concat(source);
+ const settings = new settings_1.default(options);
+ return taskManager.generate(patterns, settings);
+ }
+ FastGlob.generateTasks = generateTasks;
+ function isDynamicPattern(source, options) {
+ assertPatternsInput(source);
+ const settings = new settings_1.default(options);
+ return utils.pattern.isDynamicPattern(source, settings);
+ }
+ FastGlob.isDynamicPattern = isDynamicPattern;
+ function escapePath(source) {
+ assertPatternsInput(source);
+ return utils.path.escape(source);
+ }
+ FastGlob.escapePath = escapePath;
+})(FastGlob || (FastGlob = {}));
+function getWorks(source, _Provider, options) {
+ const patterns = [].concat(source);
+ const settings = new settings_1.default(options);
+ const tasks = taskManager.generate(patterns, settings);
+ const provider = new _Provider(settings);
+ return tasks.map(provider.read, provider);
+}
+function assertPatternsInput(input) {
+ const source = [].concat(input);
+ const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
+ if (!isValidSource) {
+ throw new TypeError('Patterns must be a string (non empty) or an array of strings');
+ }
+}
+module.exports = FastGlob;
+
+
+/***/ }),
+
+/***/ 2708:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
+const utils = __webpack_require__(5444);
+function generate(patterns, settings) {
+ const positivePatterns = getPositivePatterns(patterns);
+ const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);
+ const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));
+ const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));
+ const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
+ const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
+ return staticTasks.concat(dynamicTasks);
+}
+exports.generate = generate;
+function convertPatternsToTasks(positive, negative, dynamic) {
+ const positivePatternsGroup = groupPatternsByBaseDirectory(positive);
+ // When we have a global group – there is no reason to divide the patterns into independent tasks.
+ // In this case, the global task covers the rest.
+ if ('.' in positivePatternsGroup) {
+ const task = convertPatternGroupToTask('.', positive, negative, dynamic);
+ return [task];
+ }
+ return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);
+}
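+// Illustrative only (hypothetical patterns): ['src/**', 'test/**'] is split into
+// one task per base directory ('src' and 'test'), whereas a pattern whose base
+// is '.' (for example '*.js') collapses everything into a single global task.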
+exports.convertPatternsToTasks = convertPatternsToTasks;
+function getPositivePatterns(patterns) {
+ return utils.pattern.getPositivePatterns(patterns);
+}
+exports.getPositivePatterns = getPositivePatterns;
+function getNegativePatternsAsPositive(patterns, ignore) {
+ const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);
+ const positive = negative.map(utils.pattern.convertToPositivePattern);
+ return positive;
+}
+exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
+function groupPatternsByBaseDirectory(patterns) {
+ const group = {};
+ return patterns.reduce((collection, pattern) => {
+ const base = utils.pattern.getBaseDirectory(pattern);
+ if (base in collection) {
+ collection[base].push(pattern);
+ }
+ else {
+ collection[base] = [pattern];
+ }
+ return collection;
+ }, group);
+}
+exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
+function convertPatternGroupsToTasks(positive, negative, dynamic) {
+ return Object.keys(positive).map((base) => {
+ return convertPatternGroupToTask(base, positive[base], negative, dynamic);
+ });
+}
+exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
+function convertPatternGroupToTask(base, positive, negative, dynamic) {
+ return {
+ dynamic,
+ positive,
+ negative,
+ base,
+ patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))
+ };
+}
+exports.convertPatternGroupToTask = convertPatternGroupToTask;
+
+
+/***/ }),
+
+/***/ 5679:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __webpack_require__(2083);
+const provider_1 = __webpack_require__(257);
+class ProviderAsync extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new stream_1.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const entries = [];
+ return new Promise((resolve, reject) => {
+ const stream = this.api(root, task, options);
+ stream.once('error', reject);
+ stream.on('data', (entry) => entries.push(options.transform(entry)));
+ stream.once('end', () => resolve(entries));
+ });
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderAsync;
+
+
+/***/ }),
+
+/***/ 6983:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __webpack_require__(5444);
+const partial_1 = __webpack_require__(5295);
+class DeepFilter {
+ constructor(_settings, _micromatchOptions) {
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ }
+ getFilter(basePath, positive, negative) {
+ const matcher = this._getMatcher(positive);
+ const negativeRe = this._getNegativePatternsRe(negative);
+ return (entry) => this._filter(basePath, entry, matcher, negativeRe);
+ }
+ _getMatcher(patterns) {
+ return new partial_1.default(patterns, this._settings, this._micromatchOptions);
+ }
+ _getNegativePatternsRe(patterns) {
+ const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);
+ return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
+ }
+ _filter(basePath, entry, matcher, negativeRe) {
+ if (this._isSkippedByDeep(basePath, entry.path)) {
+ return false;
+ }
+ if (this._isSkippedSymbolicLink(entry)) {
+ return false;
+ }
+ const filepath = utils.path.removeLeadingDotSegment(entry.path);
+ if (this._isSkippedByPositivePatterns(filepath, matcher)) {
+ return false;
+ }
+ return this._isSkippedByNegativePatterns(filepath, negativeRe);
+ }
+ _isSkippedByDeep(basePath, entryPath) {
+ /**
+ * Avoid unnecessary depth calculations when it doesn't matter.
+ */
+ if (this._settings.deep === Infinity) {
+ return false;
+ }
+ return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
+ }
+ _getEntryLevel(basePath, entryPath) {
+ const entryPathDepth = entryPath.split('/').length;
+ if (basePath === '') {
+ return entryPathDepth;
+ }
+ const basePathDepth = basePath.split('/').length;
+ return entryPathDepth - basePathDepth;
+ }
+ _isSkippedSymbolicLink(entry) {
+ return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
+ }
+ _isSkippedByPositivePatterns(entryPath, matcher) {
+ return !this._settings.baseNameMatch && !matcher.match(entryPath);
+ }
+ _isSkippedByNegativePatterns(entryPath, patternsRe) {
+ return !utils.pattern.matchAny(entryPath, patternsRe);
+ }
+}
+exports.default = DeepFilter;
+
+
+/***/ }),
+
+/***/ 1343:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __webpack_require__(5444);
+class EntryFilter {
+ constructor(_settings, _micromatchOptions) {
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ this.index = new Map();
+ }
+ getFilter(positive, negative) {
+ const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);
+ const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);
+ return (entry) => this._filter(entry, positiveRe, negativeRe);
+ }
+ _filter(entry, positiveRe, negativeRe) {
+ if (this._settings.unique && this._isDuplicateEntry(entry)) {
+ return false;
+ }
+ if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
+ return false;
+ }
+ if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {
+ return false;
+ }
+ const filepath = this._settings.baseNameMatch ? entry.name : entry.path;
+ const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
+ if (this._settings.unique && isMatched) {
+ this._createIndexRecord(entry);
+ }
+ return isMatched;
+ }
+ _isDuplicateEntry(entry) {
+ return this.index.has(entry.path);
+ }
+ _createIndexRecord(entry) {
+ this.index.set(entry.path, undefined);
+ }
+ _onlyFileFilter(entry) {
+ return this._settings.onlyFiles && !entry.dirent.isFile();
+ }
+ _onlyDirectoryFilter(entry) {
+ return this._settings.onlyDirectories && !entry.dirent.isDirectory();
+ }
+ _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
+ if (!this._settings.absolute) {
+ return false;
+ }
+ const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);
+ return utils.pattern.matchAny(fullpath, patternsRe);
+ }
+ _isMatchToPatterns(entryPath, patternsRe) {
+ const filepath = utils.path.removeLeadingDotSegment(entryPath);
+ return utils.pattern.matchAny(filepath, patternsRe);
+ }
+}
+exports.default = EntryFilter;
+
+
+/***/ }),
+
+/***/ 6654:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __webpack_require__(5444);
+class ErrorFilter {
+ constructor(_settings) {
+ this._settings = _settings;
+ }
+ getFilter() {
+ return (error) => this._isNonFatalError(error);
+ }
+ _isNonFatalError(error) {
+ return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
+ }
+}
+exports.default = ErrorFilter;
+
+
+/***/ }),
+
+/***/ 2576:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __webpack_require__(5444);
+class Matcher {
+ constructor(_patterns, _settings, _micromatchOptions) {
+ this._patterns = _patterns;
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ this._storage = [];
+ this._fillStorage();
+ }
+ _fillStorage() {
+ /**
+ * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).
+ * So, before matching, we expand patterns with brace expansion into separate patterns.
+ */
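+ // For illustration (hypothetical input): a pattern like '{a,b}/*' is expanded
+ // here into the separate patterns 'a/*' and 'b/*' before segmenting.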
+ const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);
+ for (const pattern of patterns) {
+ const segments = this._getPatternSegments(pattern);
+ const sections = this._splitSegmentsIntoSections(segments);
+ this._storage.push({
+ complete: sections.length <= 1,
+ pattern,
+ segments,
+ sections
+ });
+ }
+ }
+ _getPatternSegments(pattern) {
+ const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);
+ return parts.map((part) => {
+ const dynamic = utils.pattern.isDynamicPattern(part, this._settings);
+ if (!dynamic) {
+ return {
+ dynamic: false,
+ pattern: part
+ };
+ }
+ return {
+ dynamic: true,
+ pattern: part,
+ patternRe: utils.pattern.makeRe(part, this._micromatchOptions)
+ };
+ });
+ }
+ _splitSegmentsIntoSections(segments) {
+ return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));
+ }
+}
+exports.default = Matcher;
+
+
+/***/ }),
+
+/***/ 5295:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const matcher_1 = __webpack_require__(2576);
+class PartialMatcher extends matcher_1.default {
+ match(filepath) {
+ const parts = filepath.split('/');
+ const levels = parts.length;
+ const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
+ for (const pattern of patterns) {
+ const section = pattern.sections[0];
+ /**
+ * In this case, the pattern has a globstar and we must read all directories unconditionally,
+ * but only if the level has reached the end of the first group.
+ *
+ * fixtures/{a,b}/**
+ * ^ true/false ^ always true
+ */
+ if (!pattern.complete && levels > section.length) {
+ return true;
+ }
+ const match = parts.every((part, index) => {
+ const segment = pattern.segments[index];
+ if (segment.dynamic && segment.patternRe.test(part)) {
+ return true;
+ }
+ if (!segment.dynamic && segment.pattern === part) {
+ return true;
+ }
+ return false;
+ });
+ if (match) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+exports.default = PartialMatcher;
+
+
+/***/ }),
+
+/***/ 257:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const path = __webpack_require__(5622);
+const deep_1 = __webpack_require__(6983);
+const entry_1 = __webpack_require__(1343);
+const error_1 = __webpack_require__(6654);
+const entry_2 = __webpack_require__(4029);
+class Provider {
+ constructor(_settings) {
+ this._settings = _settings;
+ this.errorFilter = new error_1.default(this._settings);
+ this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
+ this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
+ this.entryTransformer = new entry_2.default(this._settings);
+ }
+ _getRootDirectory(task) {
+ return path.resolve(this._settings.cwd, task.base);
+ }
+ _getReaderOptions(task) {
+ const basePath = task.base === '.' ? '' : task.base;
+ return {
+ basePath,
+ pathSegmentSeparator: '/',
+ concurrency: this._settings.concurrency,
+ deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
+ entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
+ errorFilter: this.errorFilter.getFilter(),
+ followSymbolicLinks: this._settings.followSymbolicLinks,
+ fs: this._settings.fs,
+ stats: this._settings.stats,
+ throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
+ transform: this.entryTransformer.getTransformer()
+ };
+ }
+ _getMicromatchOptions() {
+ return {
+ dot: this._settings.dot,
+ matchBase: this._settings.baseNameMatch,
+ nobrace: !this._settings.braceExpansion,
+ nocase: !this._settings.caseSensitiveMatch,
+ noext: !this._settings.extglob,
+ noglobstar: !this._settings.globstar,
+ posix: true,
+ strictSlashes: false
+ };
+ }
+}
+exports.default = Provider;
+
+
+/***/ }),
+
+/***/ 4630:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __webpack_require__(2413);
+const stream_2 = __webpack_require__(2083);
+const provider_1 = __webpack_require__(257);
+class ProviderStream extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new stream_2.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const source = this.api(root, task, options);
+ const destination = new stream_1.Readable({ objectMode: true, read: () => { } });
+ source
+ .once('error', (error) => destination.emit('error', error))
+ .on('data', (entry) => destination.emit('data', options.transform(entry)))
+ .once('end', () => destination.emit('end'));
+ destination
+ .once('close', () => source.destroy());
+ return destination;
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderStream;
+
+
+/***/ }),
+
+/***/ 2405:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const sync_1 = __webpack_require__(8821);
+const provider_1 = __webpack_require__(257);
+class ProviderSync extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new sync_1.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const entries = this.api(root, task, options);
+ return entries.map(options.transform);
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderSync;
+
+
+/***/ }),
+
+/***/ 4029:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const utils = __webpack_require__(5444);
+class EntryTransformer {
+ constructor(_settings) {
+ this._settings = _settings;
+ }
+ getTransformer() {
+ return (entry) => this._transform(entry);
+ }
+ _transform(entry) {
+ let filepath = entry.path;
+ if (this._settings.absolute) {
+ filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);
+ filepath = utils.path.unixify(filepath);
+ }
+ if (this._settings.markDirectories && entry.dirent.isDirectory()) {
+ filepath += '/';
+ }
+ if (!this._settings.objectMode) {
+ return filepath;
+ }
+ return Object.assign(Object.assign({}, entry), { path: filepath });
+ }
+}
+exports.default = EntryTransformer;
+
+
+/***/ }),
+
+/***/ 8062:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const path = __webpack_require__(5622);
+const fsStat = __webpack_require__(109);
+const utils = __webpack_require__(5444);
+class Reader {
+ constructor(_settings) {
+ this._settings = _settings;
+ this._fsStatSettings = new fsStat.Settings({
+ followSymbolicLink: this._settings.followSymbolicLinks,
+ fs: this._settings.fs,
+ throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
+ });
+ }
+ _getFullEntryPath(filepath) {
+ return path.resolve(this._settings.cwd, filepath);
+ }
+ _makeEntry(stats, pattern) {
+ const entry = {
+ name: pattern,
+ path: pattern,
+ dirent: utils.fs.createDirentFromStats(pattern, stats)
+ };
+ if (this._settings.stats) {
+ entry.stats = stats;
+ }
+ return entry;
+ }
+ _isFatalError(error) {
+ return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
+ }
+}
+exports.default = Reader;
+
+
+/***/ }),
+
+/***/ 2083:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const stream_1 = __webpack_require__(2413);
+const fsStat = __webpack_require__(109);
+const fsWalk = __webpack_require__(6026);
+const reader_1 = __webpack_require__(8062);
+class ReaderStream extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._walkStream = fsWalk.walkStream;
+ this._stat = fsStat.stat;
+ }
+ dynamic(root, options) {
+ return this._walkStream(root, options);
+ }
+ static(patterns, options) {
+ const filepaths = patterns.map(this._getFullEntryPath, this);
+ const stream = new stream_1.PassThrough({ objectMode: true });
+ stream._write = (index, _enc, done) => {
+ return this._getEntry(filepaths[index], patterns[index], options)
+ .then((entry) => {
+ if (entry !== null && options.entryFilter(entry)) {
+ stream.push(entry);
+ }
+ if (index === filepaths.length - 1) {
+ stream.end();
+ }
+ done();
+ })
+ .catch(done);
+ };
+ for (let i = 0; i < filepaths.length; i++) {
+ stream.write(i);
+ }
+ return stream;
+ }
+ _getEntry(filepath, pattern, options) {
+ return this._getStat(filepath)
+ .then((stats) => this._makeEntry(stats, pattern))
+ .catch((error) => {
+ if (options.errorFilter(error)) {
+ return null;
+ }
+ throw error;
+ });
+ }
+ _getStat(filepath) {
+ return new Promise((resolve, reject) => {
+ this._stat(filepath, this._fsStatSettings, (error, stats) => {
+ return error === null ? resolve(stats) : reject(error);
+ });
+ });
+ }
+}
+exports.default = ReaderStream;
+
+
+/***/ }),
+
+/***/ 8821:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const fsStat = __webpack_require__(109);
+const fsWalk = __webpack_require__(6026);
+const reader_1 = __webpack_require__(8062);
+class ReaderSync extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._walkSync = fsWalk.walkSync;
+ this._statSync = fsStat.statSync;
+ }
+ dynamic(root, options) {
+ return this._walkSync(root, options);
+ }
+ static(patterns, options) {
+ const entries = [];
+ for (const pattern of patterns) {
+ const filepath = this._getFullEntryPath(pattern);
+ const entry = this._getEntry(filepath, pattern, options);
+ if (entry === null || !options.entryFilter(entry)) {
+ continue;
+ }
+ entries.push(entry);
+ }
+ return entries;
+ }
+ _getEntry(filepath, pattern, options) {
+ try {
+ const stats = this._getStat(filepath);
+ return this._makeEntry(stats, pattern);
+ }
+ catch (error) {
+ if (options.errorFilter(error)) {
+ return null;
+ }
+ throw error;
+ }
+ }
+ _getStat(filepath) {
+ return this._statSync(filepath, this._fsStatSettings);
+ }
+}
+exports.default = ReaderSync;
+
+
+/***/ }),
+
+/***/ 952:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
+const fs = __webpack_require__(5747);
+const os = __webpack_require__(2087);
+const CPU_COUNT = os.cpus().length;
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
+ lstat: fs.lstat,
+ lstatSync: fs.lstatSync,
+ stat: fs.stat,
+ statSync: fs.statSync,
+ readdir: fs.readdir,
+ readdirSync: fs.readdirSync
+};
+class Settings {
+ constructor(_options = {}) {
+ this._options = _options;
+ this.absolute = this._getValue(this._options.absolute, false);
+ this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
+ this.braceExpansion = this._getValue(this._options.braceExpansion, true);
+ this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
+ this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
+ this.cwd = this._getValue(this._options.cwd, process.cwd());
+ this.deep = this._getValue(this._options.deep, Infinity);
+ this.dot = this._getValue(this._options.dot, false);
+ this.extglob = this._getValue(this._options.extglob, true);
+ this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
+ this.fs = this._getFileSystemMethods(this._options.fs);
+ this.globstar = this._getValue(this._options.globstar, true);
+ this.ignore = this._getValue(this._options.ignore, []);
+ this.markDirectories = this._getValue(this._options.markDirectories, false);
+ this.objectMode = this._getValue(this._options.objectMode, false);
+ this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
+ this.onlyFiles = this._getValue(this._options.onlyFiles, true);
+ this.stats = this._getValue(this._options.stats, false);
+ this.suppressErrors = this._getValue(this._options.suppressErrors, false);
+ this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
+ this.unique = this._getValue(this._options.unique, true);
+ if (this.onlyDirectories) {
+ this.onlyFiles = false;
+ }
+ if (this.stats) {
+ this.objectMode = true;
+ }
+ }
+ _getValue(option, value) {
+ return option === undefined ? value : option;
+ }
+ _getFileSystemMethods(methods = {}) {
+ return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
+ }
+}
+exports.default = Settings;
+
+
+/***/ }),
+
+/***/ 5325:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.splitWhen = exports.flatten = void 0;
+function flatten(items) {
+ return items.reduce((collection, item) => [].concat(collection, item), []);
+}
+exports.flatten = flatten;
+function splitWhen(items, predicate) {
+ const result = [[]];
+ let groupIndex = 0;
+ for (const item of items) {
+ if (predicate(item)) {
+ groupIndex++;
+ result[groupIndex] = [];
+ }
+ else {
+ result[groupIndex].push(item);
+ }
+ }
+ return result;
+}
+exports.splitWhen = splitWhen;
+
+
+/***/ }),
+
+/***/ 1230:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isEnoentCodeError = void 0;
+function isEnoentCodeError(error) {
+ return error.code === 'ENOENT';
+}
+exports.isEnoentCodeError = isEnoentCodeError;
+
+
+/***/ }),
+
+/***/ 7543:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createDirentFromStats = void 0;
+class DirentFromStats {
+ constructor(name, stats) {
+ this.name = name;
+ this.isBlockDevice = stats.isBlockDevice.bind(stats);
+ this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
+ this.isDirectory = stats.isDirectory.bind(stats);
+ this.isFIFO = stats.isFIFO.bind(stats);
+ this.isFile = stats.isFile.bind(stats);
+ this.isSocket = stats.isSocket.bind(stats);
+ this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
+ }
+}
+function createDirentFromStats(name, stats) {
+ return new DirentFromStats(name, stats);
+}
+exports.createDirentFromStats = createDirentFromStats;
+
+
+/***/ }),
+
+/***/ 5444:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;
+const array = __webpack_require__(5325);
+exports.array = array;
+const errno = __webpack_require__(1230);
+exports.errno = errno;
+const fs = __webpack_require__(7543);
+exports.fs = fs;
+const path = __webpack_require__(3873);
+exports.path = path;
+const pattern = __webpack_require__(1221);
+exports.pattern = pattern;
+const stream = __webpack_require__(8382);
+exports.stream = stream;
+const string = __webpack_require__(2203);
+exports.string = string;
+
+
+/***/ }),
+
+/***/ 3873:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;
+const path = __webpack_require__(5622);
+const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
+const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g;
+/**
+ * Designed to work only with simple paths: `dir\\file`.
+ */
+function unixify(filepath) {
+ return filepath.replace(/\\/g, '/');
+}
+exports.unixify = unixify;
+function makeAbsolute(cwd, filepath) {
+ return path.resolve(cwd, filepath);
+}
+exports.makeAbsolute = makeAbsolute;
+function escape(pattern) {
+ return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
+}
+exports.escape = escape;
+function removeLeadingDotSegment(entry) {
+ // We do not use `startsWith` because this is 10x slower than current implementation for some cases.
+ // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
+ if (entry.charAt(0) === '.') {
+ const secondCharactery = entry.charAt(1);
+ if (secondCharactery === '/' || secondCharactery === '\\') {
+ return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
+ }
+ }
+ return entry;
+}
+exports.removeLeadingDotSegment = removeLeadingDotSegment;
+
+
+/***/ }),
+
+/***/ 1221:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;
+const path = __webpack_require__(5622);
+const globParent = __webpack_require__(4655);
+const micromatch = __webpack_require__(3913);
+const picomatch = __webpack_require__(8569);
+const GLOBSTAR = '**';
+const ESCAPE_SYMBOL = '\\';
+const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
+const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/;
+const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/;
+const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/;
+const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/;
+function isStaticPattern(pattern, options = {}) {
+ return !isDynamicPattern(pattern, options);
+}
+exports.isStaticPattern = isStaticPattern;
+function isDynamicPattern(pattern, options = {}) {
+ /**
+ * A special case with an empty string is necessary for matching patterns that start with a forward slash.
+ * An empty string cannot be a dynamic pattern.
+ * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
+ */
+ if (pattern === '') {
+ return false;
+ }
+ /**
+ * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
+ * filepath directly (without read directory).
+ */
+ if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
+ return true;
+ }
+ if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ return false;
+}
+exports.isDynamicPattern = isDynamicPattern;
+function convertToPositivePattern(pattern) {
+ return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
+}
+exports.convertToPositivePattern = convertToPositivePattern;
+function convertToNegativePattern(pattern) {
+ return '!' + pattern;
+}
+exports.convertToNegativePattern = convertToNegativePattern;
+function isNegativePattern(pattern) {
+ return pattern.startsWith('!') && pattern[1] !== '(';
+}
+exports.isNegativePattern = isNegativePattern;
+function isPositivePattern(pattern) {
+ return !isNegativePattern(pattern);
+}
+exports.isPositivePattern = isPositivePattern;
+function getNegativePatterns(patterns) {
+ return patterns.filter(isNegativePattern);
+}
+exports.getNegativePatterns = getNegativePatterns;
+function getPositivePatterns(patterns) {
+ return patterns.filter(isPositivePattern);
+}
+exports.getPositivePatterns = getPositivePatterns;
+function getBaseDirectory(pattern) {
+ return globParent(pattern, { flipBackslashes: false });
+}
+exports.getBaseDirectory = getBaseDirectory;
+function hasGlobStar(pattern) {
+ return pattern.includes(GLOBSTAR);
+}
+exports.hasGlobStar = hasGlobStar;
+function endsWithSlashGlobStar(pattern) {
+ return pattern.endsWith('/' + GLOBSTAR);
+}
+exports.endsWithSlashGlobStar = endsWithSlashGlobStar;
+function isAffectDepthOfReadingPattern(pattern) {
+ const basename = path.basename(pattern);
+ return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
+}
+exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
+function expandPatternsWithBraceExpansion(patterns) {
+ return patterns.reduce((collection, pattern) => {
+ return collection.concat(expandBraceExpansion(pattern));
+ }, []);
+}
+exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
+function expandBraceExpansion(pattern) {
+ return micromatch.braces(pattern, {
+ expand: true,
+ nodupes: true
+ });
+}
+exports.expandBraceExpansion = expandBraceExpansion;
+function getPatternParts(pattern, options) {
+ let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
+ /**
+ * The scan method returns an empty array in some cases.
+ * See micromatch/picomatch#58 for more details.
+ */
+ if (parts.length === 0) {
+ parts = [pattern];
+ }
+ /**
+ * The scan method does not return an empty part for the pattern with a forward slash.
+ * This is another part of micromatch/picomatch#58.
+ */
+ if (parts[0].startsWith('/')) {
+ parts[0] = parts[0].slice(1);
+ parts.unshift('');
+ }
+ return parts;
+}
+exports.getPatternParts = getPatternParts;
+function makeRe(pattern, options) {
+ return micromatch.makeRe(pattern, options);
+}
+exports.makeRe = makeRe;
+function convertPatternsToRe(patterns, options) {
+ return patterns.map((pattern) => makeRe(pattern, options));
+}
+exports.convertPatternsToRe = convertPatternsToRe;
+function matchAny(entry, patternsRe) {
+ return patternsRe.some((patternRe) => patternRe.test(entry));
+}
+exports.matchAny = matchAny;
+
+
+/***/ }),
+
+/***/ 8382:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.merge = void 0;
+const merge2 = __webpack_require__(2578);
+function merge(streams) {
+ const mergedStream = merge2(streams);
+ streams.forEach((stream) => {
+ stream.once('error', (error) => mergedStream.emit('error', error));
+ });
+ mergedStream.once('close', () => propagateCloseEventToSources(streams));
+ mergedStream.once('end', () => propagateCloseEventToSources(streams));
+ return mergedStream;
+}
+exports.merge = merge;
+function propagateCloseEventToSources(streams) {
+ streams.forEach((stream) => stream.emit('close'));
+}
+
+
+/***/ }),
+
+/***/ 2203:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isEmpty = exports.isString = void 0;
+function isString(input) {
+ return typeof input === 'string';
+}
+exports.isString = isString;
+function isEmpty(input) {
+ return input === '';
+}
+exports.isEmpty = isEmpty;
+
+
+/***/ }),
+
+/***/ 7340:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+var reusify = __webpack_require__(2113)
+
+function fastqueue (context, worker, concurrency) {
+ if (typeof context === 'function') {
+ concurrency = worker
+ worker = context
+ context = null
+ }
+
+ var cache = reusify(Task)
+ var queueHead = null
+ var queueTail = null
+ var _running = 0
+
+ var self = {
+ push: push,
+ drain: noop,
+ saturated: noop,
+ pause: pause,
+ paused: false,
+ concurrency: concurrency,
+ running: running,
+ resume: resume,
+ idle: idle,
+ length: length,
+ getQueue: getQueue,
+ unshift: unshift,
+ empty: noop,
+ kill: kill,
+ killAndDrain: killAndDrain
+ }
+
+ return self
+
+ function running () {
+ return _running
+ }
+
+ function pause () {
+ self.paused = true
+ }
+
+ function length () {
+ var current = queueHead
+ var counter = 0
+
+ while (current) {
+ current = current.next
+ counter++
+ }
+
+ return counter
+ }
+
+ function getQueue () {
+ var current = queueHead
+ var tasks = []
+
+ while (current) {
+ tasks.push(current.value)
+ current = current.next
+ }
+
+ return tasks
+ }
+
+ function resume () {
+ if (!self.paused) return
+ self.paused = false
+ for (var i = 0; i < self.concurrency; i++) {
+ _running++
+ release()
+ }
+ }
+
+ function idle () {
+ return _running === 0 && self.length() === 0
+ }
+
+ function push (value, done) {
+ var current = cache.get()
+
+ current.context = context
+ current.release = release
+ current.value = value
+ current.callback = done || noop
+
+ if (_running === self.concurrency || self.paused) {
+ if (queueTail) {
+ queueTail.next = current
+ queueTail = current
+ } else {
+ queueHead = current
+ queueTail = current
+ self.saturated()
+ }
+ } else {
+ _running++
+ worker.call(context, current.value, current.worked)
+ }
+ }
+
+ function unshift (value, done) {
+ var current = cache.get()
+
+ current.context = context
+ current.release = release
+ current.value = value
+ current.callback = done || noop
+
+ if (_running === self.concurrency || self.paused) {
+ if (queueHead) {
+ current.next = queueHead
+ queueHead = current
+ } else {
+ queueHead = current
+ queueTail = current
+ self.saturated()
+ }
+ } else {
+ _running++
+ worker.call(context, current.value, current.worked)
+ }
+ }
+
+ function release (holder) {
+ if (holder) {
+ cache.release(holder)
+ }
+ var next = queueHead
+ if (next) {
+ if (!self.paused) {
+ if (queueTail === queueHead) {
+ queueTail = null
+ }
+ queueHead = next.next
+ next.next = null
+ worker.call(context, next.value, next.worked)
+ if (queueTail === null) {
+ self.empty()
+ }
+ } else {
+ _running--
+ }
+ } else if (--_running === 0) {
+ self.drain()
+ }
+ }
+
+ function kill () {
+ queueHead = null
+ queueTail = null
+ self.drain = noop
+ }
+
+ function killAndDrain () {
+ queueHead = null
+ queueTail = null
+ self.drain()
+ self.drain = noop
+ }
+}
+
+function noop () {}
+
+function Task () {
+ this.value = null
+ this.callback = noop
+ this.next = null
+ this.release = noop
+ this.context = null
+
+ var self = this
+
+ this.worked = function worked (err, result) {
+ var callback = self.callback
+ self.value = null
+ self.callback = noop
+ callback.call(self.context, err, result)
+ self.release(self)
+ }
+}
+
+module.exports = fastqueue
+
+
+/***/ }),
+
+/***/ 4655:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+var isGlob = __webpack_require__(4466);
+var pathPosixDirname = __webpack_require__(5622).posix.dirname;
+var isWin32 = __webpack_require__(2087).platform() === 'win32';
+
+var slash = '/';
+var backslash = /\\/g;
+var enclosure = /[\{\[].*[\/]*.*[\}\]]$/;
+var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
+var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
+
+/**
+ * @param {string} str
+ * @param {Object} opts
+ * @param {boolean} [opts.flipBackslashes=true]
+ */
+module.exports = function globParent(str, opts) {
+ var options = Object.assign({ flipBackslashes: true }, opts);
+
+ // flip windows path separators
+ if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
+ str = str.replace(backslash, slash);
+ }
+
+ // special case for strings ending in enclosure containing path separator
+ if (enclosure.test(str)) {
+ str += slash;
+ }
+
+ // preserves full path in case of trailing path separator
+ str += 'a';
+
+ // remove path parts that are globby
+ do {
+ str = pathPosixDirname(str);
+ } while (isGlob(str) || globby.test(str));
+
+ // remove escape chars and return result
+ return str.replace(escaped, '$1');
+};
+
+
+/***/ }),
+
+/***/ 6435:
+/***/ ((module) => {
+
+/*!
+ * is-extglob
+ *
+ * Copyright (c) 2014-2016, Jon Schlinkert.
+ * Licensed under the MIT License.
+ */
+
+module.exports = function isExtglob(str) {
+ if (typeof str !== 'string' || str === '') {
+ return false;
+ }
+
+ var match;
+ while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) {
+ if (match[2]) return true;
+ str = str.slice(match.index + match[0].length);
+ }
+
+ return false;
+};
+
+
+/***/ }),
+
+/***/ 4466:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+/*!
+ * is-glob
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+var isExtglob = __webpack_require__(6435);
+var chars = { '{': '}', '(': ')', '[': ']'};
+var strictRegex = /\\(.)|(^!|\*|[\].+)]\?|\[[^\\\]]+\]|\{[^\\}]+\}|\(\?[:!=][^\\)]+\)|\([^|]+\|[^\\)]+\))/;
+var relaxedRegex = /\\(.)|(^!|[*?{}()[\]]|\(\?)/;
+
+module.exports = function isGlob(str, options) {
+ if (typeof str !== 'string' || str === '') {
+ return false;
+ }
+
+ if (isExtglob(str)) {
+ return true;
+ }
+
+ var regex = strictRegex;
+ var match;
+
+ // optionally relax regex
+ if (options && options.strict === false) {
+ regex = relaxedRegex;
+ }
+
+ while ((match = regex.exec(str))) {
+ if (match[2]) return true;
+ var idx = match.index + match[0].length;
+
+ // if an open bracket/brace/paren is escaped,
+ // set the index to the next closing character
+ var open = match[1];
+ var close = open ? chars[open] : null;
+ if (open && close) {
+ var n = str.indexOf(close, idx);
+ if (n !== -1) {
+ idx = n + 1;
+ }
+ }
+
+ str = str.slice(idx);
+ }
+ return false;
+};
+
+
+/***/ }),
+
+/***/ 2578:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+/*
+ * merge2
+ * https://github.com/teambition/merge2
+ *
+ * Copyright (c) 2014-2020 Teambition
+ * Licensed under the MIT license.
+ */
+const Stream = __webpack_require__(2413)
+const PassThrough = Stream.PassThrough
+const slice = Array.prototype.slice
+
+module.exports = merge2
+
+function merge2 () {
+ const streamsQueue = []
+ const args = slice.call(arguments)
+ let merging = false
+ let options = args[args.length - 1]
+
+ if (options && !Array.isArray(options) && options.pipe == null) {
+ args.pop()
+ } else {
+ options = {}
+ }
+
+ const doEnd = options.end !== false
+ const doPipeError = options.pipeError === true
+ if (options.objectMode == null) {
+ options.objectMode = true
+ }
+ if (options.highWaterMark == null) {
+ options.highWaterMark = 64 * 1024
+ }
+ const mergedStream = PassThrough(options)
+
+ function addStream () {
+ for (let i = 0, len = arguments.length; i < len; i++) {
+ streamsQueue.push(pauseStreams(arguments[i], options))
+ }
+ mergeStream()
+ return this
+ }
+
+ function mergeStream () {
+ if (merging) {
+ return
+ }
+ merging = true
+
+ let streams = streamsQueue.shift()
+ if (!streams) {
+ process.nextTick(endStream)
+ return
+ }
+ if (!Array.isArray(streams)) {
+ streams = [streams]
+ }
+
+ let pipesCount = streams.length + 1
+
+ function next () {
+ if (--pipesCount > 0) {
+ return
+ }
+ merging = false
+ mergeStream()
+ }
+
+ function pipe (stream) {
+ function onend () {
+ stream.removeListener('merge2UnpipeEnd', onend)
+ stream.removeListener('end', onend)
+ if (doPipeError) {
+ stream.removeListener('error', onerror)
+ }
+ next()
+ }
+ function onerror (err) {
+ mergedStream.emit('error', err)
+ }
+ // skip ended stream
+ if (stream._readableState.endEmitted) {
+ return next()
+ }
+
+ stream.on('merge2UnpipeEnd', onend)
+ stream.on('end', onend)
+
+ if (doPipeError) {
+ stream.on('error', onerror)
+ }
+
+ stream.pipe(mergedStream, { end: false })
+ // compatible for old stream
+ stream.resume()
+ }
+
+ for (let i = 0; i < streams.length; i++) {
+ pipe(streams[i])
+ }
+
+ next()
+ }
+
+ function endStream () {
+ merging = false
+ // emit 'queueDrain' when all streams merged.
+ mergedStream.emit('queueDrain')
+ if (doEnd) {
+ mergedStream.end()
+ }
+ }
+
+ mergedStream.setMaxListeners(0)
+ mergedStream.add = addStream
+ mergedStream.on('unpipe', function (stream) {
+ stream.emit('merge2UnpipeEnd')
+ })
+
+ if (args.length) {
+ addStream.apply(null, args)
+ }
+ return mergedStream
+}
+
+// check and pause streams for pipe.
+function pauseStreams (streams, options) {
+ if (!Array.isArray(streams)) {
+ // Backwards-compat with old-style streams
+ if (!streams._readableState && streams.pipe) {
+ streams = streams.pipe(PassThrough(options))
+ }
+ if (!streams._readableState || !streams.pause || !streams.pipe) {
+ throw new Error('Only readable stream can be merged.')
+ }
+ streams.pause()
+ } else {
+ for (let i = 0, len = streams.length; i < len; i++) {
+ streams[i] = pauseStreams(streams[i], options)
+ }
+ }
+ return streams
+}
+
+
+/***/ }),
+
+/***/ 467:
+/***/ ((module, exports, __webpack_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var Stream = _interopDefault(__webpack_require__(2413));
+var http = _interopDefault(__webpack_require__(8605));
+var Url = _interopDefault(__webpack_require__(8835));
+var https = _interopDefault(__webpack_require__(7211));
+var zlib = _interopDefault(__webpack_require__(8761));
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = __webpack_require__(2877).convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+ res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+ }
+
+ // xml
+ if (!res && str) {
+ res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
+ }
+
+ // found charset
+ if (res) {
+ charset = res.pop();
+ }
+
+ // turn raw buffers into a single utf-8 buffer
+ return convert(buffer, 'UTF-8', charset).toString();
+}
+
+// expose Promise
+Body.Promise = global.Promise;
+
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+function validateName(name) {
+ name = `${name}`;
+ if (invalidTokenRegex.test(name) || name === '') {
+ throw new TypeError(`${name} is not a legal HTTP header name`);
+ }
+}
+
+function validateValue(value) {
+ value = `${value}`;
+ if (invalidHeaderCharRegex.test(value)) {
+ throw new TypeError(`${value} is not a legal HTTP header value`);
+ }
+}
+
+function find(map, name) {
+ name = name.toLowerCase();
+ for (const key in map) {
+ if (key.toLowerCase() === name) {
+ return key;
+ }
+ }
+ return undefined;
+}
+
+const MAP = Symbol('map');
+
+class Headers {
+ /**
+ * Headers class
+ *
+ * @param Object headers Response headers
+ * @return Void
+ */
+ constructor() {
+ let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+ // sequence<sequence<ByteString>>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+ * Convenience property representing if the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parse_url(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parse_url(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parse_url(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+const resolve_url = Url.resolve;
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ request.body.destroy(error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
-function endpointsToMethods(octokit, endpointsMap) {
- const newMethods = {};
+ // send request
+ const req = send(options);
+ let reqTimeout;
- for (const [scope, endpoints] of Object.entries(endpointsMap)) {
- for (const [methodName, endpoint] of Object.entries(endpoints)) {
- const [route, defaults, decorations] = endpoint;
- const [method, url] = route.split(/ /);
- const endpointDefaults = Object.assign({
- method,
- url
- }, defaults);
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
- if (!newMethods[scope]) {
- newMethods[scope] = {};
- }
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
- const scopeMethods = newMethods[scope];
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
- if (decorations) {
- scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
- continue;
- }
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+ finalize();
+ });
- scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
- }
- }
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
- return newMethods;
-}
+ const headers = createHeadersLenient(res.headers);
-function decorate(octokit, scope, methodName, defaults, decorations) {
- const requestWithDefaults = octokit.request.defaults(defaults);
- /* istanbul ignore next */
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
- function withDecorations(...args) {
- // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
- let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
+ // HTTP fetch step 5.3
+ const locationURL = location === null ? null : resolve_url(request.url, location);
- if (decorations.mapToData) {
- options = Object.assign({}, options, {
- data: options[decorations.mapToData],
- [decorations.mapToData]: undefined
- });
- return requestWithDefaults(options);
- }
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
- if (decorations.renamed) {
- const [newScope, newMethodName] = decorations.renamed;
- octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
- }
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
- if (decorations.deprecated) {
- octokit.log.warn(decorations.deprecated);
- }
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
- if (decorations.renamedParameters) {
- // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
- const options = requestWithDefaults.endpoint.merge(...args);
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
- for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
- if (name in options) {
- octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
- if (!(alias in options)) {
- options[alias] = options[name];
- }
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
- delete options[name];
- }
- }
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
- return requestWithDefaults(options);
- } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
- return requestWithDefaults(...args);
- }
+ // HTTP-network fetch step 12.1.1.4: handle content codings
- return Object.assign(withDecorations, requestWithDefaults);
-}
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ return;
+ }
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
/**
- * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
- * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
- * done, we will remove the registerEndpoints methods and return the methods
- * directly as with the other plugins. At that point we will also remove the
- * legacy workarounds and deprecations.
+ * Redirect code matching
*
- * See the plan at
- * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
+ * @param Number code Status code
+ * @return Boolean
*/
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
-function restEndpointMethods(octokit) {
- return endpointsToMethods(octokit, Endpoints);
-}
-restEndpointMethods.VERSION = VERSION;
-
-exports.restEndpointMethods = restEndpointMethods;
-//# sourceMappingURL=index.js.map
+// expose Promise
+fetch.Promise = global.Promise;
+module.exports = exports = fetch;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.default = exports;
+exports.Headers = Headers;
+exports.Request = Request;
+exports.Response = Response;
+exports.FetchError = FetchError;
-/***/ }),
-/***/ 537:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+/***/ }),
-"use strict";
+/***/ 1223:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+var wrappy = __webpack_require__(2940)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
-var deprecation = __webpack_require__(8932);
-var once = _interopDefault(__webpack_require__(1223));
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
-const logOnce = once(deprecation => console.warn(deprecation));
-/**
- * Error with extra properties to help with debugging
- */
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
-class RequestError extends Error {
- constructor(message, statusCode, options) {
- super(message); // Maintains proper stack trace (only available on V8)
- /* istanbul ignore next */
+/***/ }),
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
- }
+/***/ 8569:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
- this.name = "HttpError";
- this.status = statusCode;
- Object.defineProperty(this, "code", {
- get() {
- logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
- return statusCode;
- }
+"use strict";
- });
- this.headers = options.headers || {}; // redact request credentials without mutating original request options
- const requestCopy = Object.assign({}, options.request);
+module.exports = __webpack_require__(3322);
- if (options.request.headers.authorization) {
- requestCopy.headers = Object.assign({}, options.request.headers, {
- authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
- });
- }
- requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
- // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
- .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
- // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
- .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
- this.request = requestCopy;
- }
+/***/ }),
-}
+/***/ 6099:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
-exports.RequestError = RequestError;
-//# sourceMappingURL=index.js.map
+"use strict";
-/***/ }),
+const path = __webpack_require__(5622);
+const WIN_SLASH = '\\\\/';
+const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
-/***/ 6234:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
+/**
+ * Posix glob regex
+ */
-"use strict";
+const DOT_LITERAL = '\\.';
+const PLUS_LITERAL = '\\+';
+const QMARK_LITERAL = '\\?';
+const SLASH_LITERAL = '\\/';
+const ONE_CHAR = '(?=.)';
+const QMARK = '[^/]';
+const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
+const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
+const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
+const NO_DOT = `(?!${DOT_LITERAL})`;
+const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
+const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
+const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
+const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
+const STAR = `${QMARK}*?`;
+
+const POSIX_CHARS = {
+ DOT_LITERAL,
+ PLUS_LITERAL,
+ QMARK_LITERAL,
+ SLASH_LITERAL,
+ ONE_CHAR,
+ QMARK,
+ END_ANCHOR,
+ DOTS_SLASH,
+ NO_DOT,
+ NO_DOTS,
+ NO_DOT_SLASH,
+ NO_DOTS_SLASH,
+ QMARK_NO_DOT,
+ STAR,
+ START_ANCHOR
+};
+/**
+ * Windows glob regex
+ */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+const WINDOWS_CHARS = {
+ ...POSIX_CHARS,
+
+ SLASH_LITERAL: `[${WIN_SLASH}]`,
+ QMARK: WIN_NO_SLASH,
+ STAR: `${WIN_NO_SLASH}*?`,
+ DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
+ NO_DOT: `(?!${DOT_LITERAL})`,
+ NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
+ NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
+ NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
+ QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
+ START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
+ END_ANCHOR: `(?:[${WIN_SLASH}]|$)`
+};
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+/**
+ * POSIX Bracket Regex
+ */
-var endpoint = __webpack_require__(9440);
-var universalUserAgent = __webpack_require__(5030);
-var isPlainObject = __webpack_require__(9062);
-var nodeFetch = _interopDefault(__webpack_require__(467));
-var requestError = __webpack_require__(537);
+const POSIX_REGEX_SOURCE = {
+ alnum: 'a-zA-Z0-9',
+ alpha: 'a-zA-Z',
+ ascii: '\\x00-\\x7F',
+ blank: ' \\t',
+ cntrl: '\\x00-\\x1F\\x7F',
+ digit: '0-9',
+ graph: '\\x21-\\x7E',
+ lower: 'a-z',
+ print: '\\x20-\\x7E ',
+ punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
+ space: ' \\t\\r\\n\\v\\f',
+ upper: 'A-Z',
+ word: 'A-Za-z0-9_',
+ xdigit: 'A-Fa-f0-9'
+};
-const VERSION = "5.4.10";
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+ POSIX_REGEX_SOURCE,
+
+ // regular expressions
+ REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
+ REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
+ REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
+ REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
+ REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
+ REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
+
+ // Replace globs with equivalent patterns to reduce parsing time.
+ REPLACEMENTS: {
+ '***': '*',
+ '**/**': '**',
+ '**/**/**': '**'
+ },
-function getBufferResponse(response) {
- return response.arrayBuffer();
-}
+ // Digits
+ CHAR_0: 48, /* 0 */
+ CHAR_9: 57, /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 65, /* A */
+ CHAR_LOWERCASE_A: 97, /* a */
+ CHAR_UPPERCASE_Z: 90, /* Z */
+ CHAR_LOWERCASE_Z: 122, /* z */
+
+ CHAR_LEFT_PARENTHESES: 40, /* ( */
+ CHAR_RIGHT_PARENTHESES: 41, /* ) */
+
+ CHAR_ASTERISK: 42, /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: 38, /* & */
+ CHAR_AT: 64, /* @ */
+ CHAR_BACKWARD_SLASH: 92, /* \ */
+ CHAR_CARRIAGE_RETURN: 13, /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
+ CHAR_COLON: 58, /* : */
+ CHAR_COMMA: 44, /* , */
+ CHAR_DOT: 46, /* . */
+ CHAR_DOUBLE_QUOTE: 34, /* " */
+ CHAR_EQUAL: 61, /* = */
+ CHAR_EXCLAMATION_MARK: 33, /* ! */
+ CHAR_FORM_FEED: 12, /* \f */
+ CHAR_FORWARD_SLASH: 47, /* / */
+ CHAR_GRAVE_ACCENT: 96, /* ` */
+ CHAR_HASH: 35, /* # */
+ CHAR_HYPHEN_MINUS: 45, /* - */
+ CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
+ CHAR_LEFT_CURLY_BRACE: 123, /* { */
+ CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
+ CHAR_LINE_FEED: 10, /* \n */
+ CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
+ CHAR_PERCENT: 37, /* % */
+ CHAR_PLUS: 43, /* + */
+ CHAR_QUESTION_MARK: 63, /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
+ CHAR_RIGHT_CURLY_BRACE: 125, /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
+ CHAR_SEMICOLON: 59, /* ; */
+ CHAR_SINGLE_QUOTE: 39, /* ' */
+ CHAR_SPACE: 32, /* */
+ CHAR_TAB: 9, /* \t */
+ CHAR_UNDERSCORE: 95, /* _ */
+ CHAR_VERTICAL_LINE: 124, /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
+
+ SEP: path.sep,
-function fetchWrapper(requestOptions) {
- if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
- requestOptions.body = JSON.stringify(requestOptions.body);
- }
+ /**
+ * Create EXTGLOB_CHARS
+ */
- let headers = {};
- let status;
- let url;
- const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
- return fetch(requestOptions.url, Object.assign({
- method: requestOptions.method,
- body: requestOptions.body,
- headers: requestOptions.headers,
- redirect: requestOptions.redirect
- }, requestOptions.request)).then(response => {
- url = response.url;
- status = response.status;
+ extglobChars(chars) {
+ return {
+ '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
+ '?': { type: 'qmark', open: '(?:', close: ')?' },
+ '+': { type: 'plus', open: '(?:', close: ')+' },
+ '*': { type: 'star', open: '(?:', close: ')*' },
+ '@': { type: 'at', open: '(?:', close: ')' }
+ };
+ },
- for (const keyAndValue of response.headers) {
- headers[keyAndValue[0]] = keyAndValue[1];
- }
+ /**
+ * Create GLOB_CHARS
+ */
- if (status === 204 || status === 205) {
- return;
- } // GitHub API returns 200 for HEAD requests
+ globChars(win32) {
+ return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
+ }
+};
- if (requestOptions.method === "HEAD") {
- if (status < 400) {
- return;
- }
+/***/ }),
- throw new requestError.RequestError(response.statusText, status, {
- headers,
- request: requestOptions
- });
- }
+/***/ 2139:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
- if (status === 304) {
- throw new requestError.RequestError("Not modified", status, {
- headers,
- request: requestOptions
- });
- }
+"use strict";
- if (status >= 400) {
- return response.text().then(message => {
- const error = new requestError.RequestError(message, status, {
- headers,
- request: requestOptions
- });
- try {
- let responseBody = JSON.parse(error.message);
- Object.assign(error, responseBody);
- let errors = responseBody.errors; // Assumption `errors` would always be in Array format
+const constants = __webpack_require__(6099);
+const utils = __webpack_require__(479);
- error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
- } catch (e) {// ignore, see octokit/rest.js#684
- }
+/**
+ * Constants
+ */
- throw error;
- });
- }
+const {
+ MAX_LENGTH,
+ POSIX_REGEX_SOURCE,
+ REGEX_NON_SPECIAL_CHARS,
+ REGEX_SPECIAL_CHARS_BACKREF,
+ REPLACEMENTS
+} = constants;
- const contentType = response.headers.get("content-type");
+/**
+ * Helpers
+ */
- if (/application\/json/.test(contentType)) {
- return response.json();
- }
+const expandRange = (args, options) => {
+ if (typeof options.expandRange === 'function') {
+ return options.expandRange(...args, options);
+ }
- if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
- return response.text();
- }
+ args.sort();
+ const value = `[${args.join('-')}]`;
- return getBufferResponse(response);
- }).then(data => {
- return {
- status,
- url,
- headers,
- data
- };
- }).catch(error => {
- if (error instanceof requestError.RequestError) {
- throw error;
- }
+ try {
+ /* eslint-disable-next-line no-new */
+ new RegExp(value);
+ } catch (ex) {
+ return args.map(v => utils.escapeRegex(v)).join('..');
+ }
- throw new requestError.RequestError(error.message, 500, {
- headers,
- request: requestOptions
- });
- });
-}
+ return value;
+};
-function withDefaults(oldEndpoint, newDefaults) {
- const endpoint = oldEndpoint.defaults(newDefaults);
+/**
+ * Create the message for a syntax error
+ */
- const newApi = function (route, parameters) {
- const endpointOptions = endpoint.merge(route, parameters);
+const syntaxError = (type, char) => {
+ return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
+};
- if (!endpointOptions.request || !endpointOptions.request.hook) {
- return fetchWrapper(endpoint.parse(endpointOptions));
- }
+/**
+ * Parse the given input string.
+ * @param {String} input
+ * @param {Object} options
+ * @return {Object}
+ */
- const request = (route, parameters) => {
- return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
- };
+const parse = (input, options) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
- Object.assign(request, {
- endpoint,
- defaults: withDefaults.bind(null, endpoint)
- });
- return endpointOptions.request.hook(request, endpointOptions);
- };
+ input = REPLACEMENTS[input] || input;
- return Object.assign(newApi, {
- endpoint,
- defaults: withDefaults.bind(null, endpoint)
- });
-}
+ const opts = { ...options };
+ const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
-const request = withDefaults(endpoint.endpoint, {
- headers: {
- "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ let len = input.length;
+ if (len > max) {
+ throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
-});
-exports.request = request;
-//# sourceMappingURL=index.js.map
+ const bos = { type: 'bos', value: '', output: opts.prepend || '' };
+ const tokens = [bos];
+
+ const capture = opts.capture ? '' : '?:';
+ const win32 = utils.isWindows(options);
+
+ // create constants based on platform, for windows or posix
+ const PLATFORM_CHARS = constants.globChars(win32);
+ const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);
+
+ const {
+ DOT_LITERAL,
+ PLUS_LITERAL,
+ SLASH_LITERAL,
+ ONE_CHAR,
+ DOTS_SLASH,
+ NO_DOT,
+ NO_DOT_SLASH,
+ NO_DOTS_SLASH,
+ QMARK,
+ QMARK_NO_DOT,
+ STAR,
+ START_ANCHOR
+ } = PLATFORM_CHARS;
+
+ const globstar = (opts) => {
+ return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
+ };
+ const nodot = opts.dot ? '' : NO_DOT;
+ const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
+ let star = opts.bash === true ? globstar(opts) : STAR;
-/***/ }),
+ if (opts.capture) {
+ star = `(${star})`;
+ }
-/***/ 9062:
-/***/ ((__unused_webpack_module, exports) => {
+ // minimatch options support
+ if (typeof opts.noext === 'boolean') {
+ opts.noextglob = opts.noext;
+ }
-"use strict";
+ const state = {
+ input,
+ index: -1,
+ start: 0,
+ dot: opts.dot === true,
+ consumed: '',
+ output: '',
+ prefix: '',
+ backtrack: false,
+ negated: false,
+ brackets: 0,
+ braces: 0,
+ parens: 0,
+ quotes: 0,
+ globstar: false,
+ tokens
+ };
+ input = utils.removePrefix(input, state);
+ len = input.length;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+ const extglobs = [];
+ const braces = [];
+ const stack = [];
+ let prev = bos;
+ let value;
-/*!
- * is-plain-object
- *
- * Copyright (c) 2014-2017, Jon Schlinkert.
- * Released under the MIT License.
- */
+ /**
+ * Tokenizing helpers
+ */
-function isObject(o) {
- return Object.prototype.toString.call(o) === '[object Object]';
-}
+ const eos = () => state.index === len - 1;
+ const peek = state.peek = (n = 1) => input[state.index + n];
+ const advance = state.advance = () => input[++state.index];
+ const remaining = () => input.slice(state.index + 1);
+ const consume = (value = '', num = 0) => {
+ state.consumed += value;
+ state.index += num;
+ };
+ const append = token => {
+ state.output += token.output != null ? token.output : token.value;
+ consume(token.value);
+ };
-function isPlainObject(o) {
- var ctor,prot;
+ const negate = () => {
+ let count = 1;
- if (isObject(o) === false) return false;
+ while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
+ advance();
+ state.start++;
+ count++;
+ }
- // If has modified constructor
- ctor = o.constructor;
- if (ctor === undefined) return true;
+ if (count % 2 === 0) {
+ return false;
+ }
- // If has modified prototype
- prot = ctor.prototype;
- if (isObject(prot) === false) return false;
+ state.negated = true;
+ state.start++;
+ return true;
+ };
- // If constructor does not have an Object-specific method
- if (prot.hasOwnProperty('isPrototypeOf') === false) {
- return false;
- }
+ const increment = type => {
+ state[type]++;
+ stack.push(type);
+ };
- // Most likely a plain Object
- return true;
-}
+ const decrement = type => {
+ state[type]--;
+ stack.pop();
+ };
-exports.isPlainObject = isPlainObject;
+ /**
+ * Push tokens onto the tokens array. This helper speeds up
+ * tokenizing by 1) helping us avoid backtracking as much as possible,
+ * and 2) helping us avoid creating extra tokens when consecutive
+ * characters are plain text. This improves performance and simplifies
+ * lookbehinds.
+ */
+ const push = tok => {
+ if (prev.type === 'globstar') {
+ const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
+ const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
-/***/ }),
+ if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
+ state.output = state.output.slice(0, -prev.output.length);
+ prev.type = 'star';
+ prev.value = '*';
+ prev.output = star;
+ state.output += prev.output;
+ }
+ }
-/***/ 3682:
-/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+ if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) {
+ extglobs[extglobs.length - 1].inner += tok.value;
+ }
-var register = __webpack_require__(4670)
-var addHook = __webpack_require__(5549)
-var removeHook = __webpack_require__(6819)
+ if (tok.value || tok.output) append(tok);
+ if (prev && prev.type === 'text' && tok.type === 'text') {
+ prev.value += tok.value;
+ prev.output = (prev.output || '') + tok.value;
+ return;
+ }
-// bind with array of arguments: https://stackoverflow.com/a/21792913
-var bind = Function.bind
-var bindable = bind.bind(bind)
+ tok.prev = prev;
+ tokens.push(tok);
+ prev = tok;
+ };
-function bindApi (hook, state, name) {
- var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
- hook.api = { remove: removeHookRef }
- hook.remove = removeHookRef
+ const extglobOpen = (type, value) => {
+ const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
- ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
- var args = name ? [state, kind, name] : [state, kind]
- hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
- })
-}
+ token.prev = prev;
+ token.parens = state.parens;
+ token.output = state.output;
+ const output = (opts.capture ? '(' : '') + token.open;
-function HookSingular () {
- var singularHookName = 'h'
- var singularHookState = {
- registry: {}
- }
- var singularHook = register.bind(null, singularHookState, singularHookName)
- bindApi(singularHook, singularHookState, singularHookName)
- return singularHook
-}
+ increment('parens');
+ push({ type, value, output: state.output ? '' : ONE_CHAR });
+ push({ type: 'paren', extglob: true, value: advance(), output });
+ extglobs.push(token);
+ };
-function HookCollection () {
- var state = {
- registry: {}
- }
+ const extglobClose = token => {
+ let output = token.close + (opts.capture ? ')' : '');
- var hook = register.bind(null, state)
- bindApi(hook, state)
+ if (token.type === 'negate') {
+ let extglobStar = star;
- return hook
-}
+ if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
+ extglobStar = globstar(opts);
+ }
-var collectionHookDeprecationMessageDisplayed = false
-function Hook () {
- if (!collectionHookDeprecationMessageDisplayed) {
- console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
- collectionHookDeprecationMessageDisplayed = true
- }
- return HookCollection()
-}
+ if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
+ output = token.close = `)$))${extglobStar}`;
+ }
-Hook.Singular = HookSingular.bind()
-Hook.Collection = HookCollection.bind()
+ if (token.prev.type === 'bos' && eos()) {
+ state.negatedExtglob = true;
+ }
+ }
-module.exports = Hook
-// expose constructors as a named property for TypeScript
-module.exports.Hook = Hook
-module.exports.Singular = Hook.Singular
-module.exports.Collection = Hook.Collection
+ push({ type: 'paren', extglob: true, value, output });
+ decrement('parens');
+ };
+ /**
+ * Fast paths
+ */
-/***/ }),
+ if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
+ let backslashes = false;
-/***/ 5549:
-/***/ ((module) => {
+ let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
+ if (first === '\\') {
+ backslashes = true;
+ return m;
+ }
-module.exports = addHook
+ if (first === '?') {
+ if (esc) {
+ return esc + first + (rest ? QMARK.repeat(rest.length) : '');
+ }
+ if (index === 0) {
+ return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
+ }
+ return QMARK.repeat(chars.length);
+ }
-function addHook (state, kind, name, hook) {
- var orig = hook
- if (!state.registry[name]) {
- state.registry[name] = []
- }
+ if (first === '.') {
+ return DOT_LITERAL.repeat(chars.length);
+ }
- if (kind === 'before') {
- hook = function (method, options) {
- return Promise.resolve()
- .then(orig.bind(null, options))
- .then(method.bind(null, options))
+ if (first === '*') {
+ if (esc) {
+ return esc + first + (rest ? star : '');
+ }
+ return star;
+ }
+ return esc ? m : `\\${m}`;
+ });
+
+ if (backslashes === true) {
+ if (opts.unescape === true) {
+ output = output.replace(/\\/g, '');
+ } else {
+ output = output.replace(/\\+/g, m => {
+ return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
+ });
+ }
}
- }
- if (kind === 'after') {
- hook = function (method, options) {
- var result
- return Promise.resolve()
- .then(method.bind(null, options))
- .then(function (result_) {
- result = result_
- return orig(result, options)
- })
- .then(function () {
- return result
- })
+ if (output === input && opts.contains === true) {
+ state.output = input;
+ return state;
}
+
+ state.output = utils.wrapOutput(output, state, options);
+ return state;
}
- if (kind === 'error') {
- hook = function (method, options) {
- return Promise.resolve()
- .then(method.bind(null, options))
- .catch(function (error) {
- return orig(error, options)
- })
+ /**
+ * Tokenize input until we reach end-of-string
+ */
+
+ while (!eos()) {
+ value = advance();
+
+ if (value === '\u0000') {
+ continue;
}
- }
- state.registry[name].push({
- hook: hook,
- orig: orig
- })
-}
+ /**
+ * Escaped characters
+ */
+ if (value === '\\') {
+ const next = peek();
-/***/ }),
+ if (next === '/' && opts.bash !== true) {
+ continue;
+ }
-/***/ 4670:
-/***/ ((module) => {
+ if (next === '.' || next === ';') {
+ continue;
+ }
-module.exports = register
+ if (!next) {
+ value += '\\';
+ push({ type: 'text', value });
+ continue;
+ }
-function register (state, name, method, options) {
- if (typeof method !== 'function') {
- throw new Error('method for before hook must be a function')
- }
+ // collapse slashes to reduce potential for exploits
+ const match = /^\\+/.exec(remaining());
+ let slashes = 0;
- if (!options) {
- options = {}
- }
+ if (match && match[0].length > 2) {
+ slashes = match[0].length;
+ state.index += slashes;
+ if (slashes % 2 !== 0) {
+ value += '\\';
+ }
+ }
- if (Array.isArray(name)) {
- return name.reverse().reduce(function (callback, name) {
- return register.bind(null, state, name, callback, options)
- }, method)()
- }
+ if (opts.unescape === true) {
+ value = advance() || '';
+ } else {
+ value += advance() || '';
+ }
- return Promise.resolve()
- .then(function () {
- if (!state.registry[name]) {
- return method(options)
+ if (state.brackets === 0) {
+ push({ type: 'text', value });
+ continue;
}
+ }
- return (state.registry[name]).reduce(function (method, registered) {
- return registered.hook.bind(null, method, options)
- }, method)()
- })
-}
+ /**
+ * If we're inside a regex character class, continue
+ * until we reach the closing bracket.
+ */
+ if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
+ if (opts.posix !== false && value === ':') {
+ const inner = prev.value.slice(1);
+ if (inner.includes('[')) {
+ prev.posix = true;
+
+ if (inner.includes(':')) {
+ const idx = prev.value.lastIndexOf('[');
+ const pre = prev.value.slice(0, idx);
+ const rest = prev.value.slice(idx + 2);
+ const posix = POSIX_REGEX_SOURCE[rest];
+ if (posix) {
+ prev.value = pre + posix;
+ state.backtrack = true;
+ advance();
+
+ if (!bos.output && tokens.indexOf(prev) === 1) {
+ bos.output = ONE_CHAR;
+ }
+ continue;
+ }
+ }
+ }
+ }
-/***/ }),
+ if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
+ value = `\\${value}`;
+ }
-/***/ 6819:
-/***/ ((module) => {
+ if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
+ value = `\\${value}`;
+ }
-module.exports = removeHook
+ if (opts.posix === true && value === '!' && prev.value === '[') {
+ value = '^';
+ }
-function removeHook (state, name, method) {
- if (!state.registry[name]) {
- return
- }
+ prev.value += value;
+ append({ value });
+ continue;
+ }
- var index = state.registry[name]
- .map(function (registered) { return registered.orig })
- .indexOf(method)
+ /**
+ * If we're inside a quoted string, continue
+ * until we reach the closing double quote.
+ */
- if (index === -1) {
- return
- }
+ if (state.quotes === 1 && value !== '"') {
+ value = utils.escapeRegex(value);
+ prev.value += value;
+ append({ value });
+ continue;
+ }
- state.registry[name].splice(index, 1)
-}
+ /**
+ * Double quotes
+ */
+ if (value === '"') {
+ state.quotes = state.quotes === 1 ? 0 : 1;
+ if (opts.keepQuotes === true) {
+ push({ type: 'text', value });
+ }
+ continue;
+ }
-/***/ }),
+ /**
+ * Parentheses
+ */
-/***/ 8932:
-/***/ ((__unused_webpack_module, exports) => {
+ if (value === '(') {
+ increment('parens');
+ push({ type: 'paren', value });
+ continue;
+ }
-"use strict";
+ if (value === ')') {
+ if (state.parens === 0 && opts.strictBrackets === true) {
+ throw new SyntaxError(syntaxError('opening', '('));
+ }
+
+ const extglob = extglobs[extglobs.length - 1];
+ if (extglob && state.parens === extglob.parens + 1) {
+ extglobClose(extglobs.pop());
+ continue;
+ }
+ push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
+ decrement('parens');
+ continue;
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+ /**
+ * Square brackets
+ */
-class Deprecation extends Error {
- constructor(message) {
- super(message); // Maintains proper stack trace (only available on V8)
+ if (value === '[') {
+ if (opts.nobracket === true || !remaining().includes(']')) {
+ if (opts.nobracket !== true && opts.strictBrackets === true) {
+ throw new SyntaxError(syntaxError('closing', ']'));
+ }
- /* istanbul ignore next */
+ value = `\\${value}`;
+ } else {
+ increment('brackets');
+ }
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
+ push({ type: 'bracket', value });
+ continue;
}
- this.name = 'Deprecation';
- }
+ if (value === ']') {
+ if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
+ push({ type: 'text', value, output: `\\${value}` });
+ continue;
+ }
-}
+ if (state.brackets === 0) {
+ if (opts.strictBrackets === true) {
+ throw new SyntaxError(syntaxError('opening', '['));
+ }
-exports.Deprecation = Deprecation;
+ push({ type: 'text', value, output: `\\${value}` });
+ continue;
+ }
+ decrement('brackets');
-/***/ }),
+ const prevValue = prev.value.slice(1);
+ if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
+ value = `/${value}`;
+ }
-/***/ 467:
-/***/ ((module, exports, __webpack_require__) => {
+ prev.value += value;
+ append({ value });
-"use strict";
+ // when literal brackets are explicitly disabled
+ // assume we should match with a regex character class
+ if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {
+ continue;
+ }
+ const escaped = utils.escapeRegex(prev.value);
+ state.output = state.output.slice(0, -prev.value.length);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+ // when literal brackets are explicitly enabled
+ // assume we should escape the brackets to match literal characters
+ if (opts.literalBrackets === true) {
+ state.output += escaped;
+ prev.value = escaped;
+ continue;
+ }
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+ // when the user specifies nothing, try to match both
+ prev.value = `(${capture}${escaped}|${prev.value})`;
+ state.output += prev.value;
+ continue;
+ }
-var Stream = _interopDefault(__webpack_require__(2413));
-var http = _interopDefault(__webpack_require__(8605));
-var Url = _interopDefault(__webpack_require__(8835));
-var https = _interopDefault(__webpack_require__(7211));
-var zlib = _interopDefault(__webpack_require__(8761));
+ /**
+ * Braces
+ */
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+ if (value === '{' && opts.nobrace !== true) {
+ increment('braces');
-// fix for "Readable" isn't a named export issue
-const Readable = Stream.Readable;
+ const open = {
+ type: 'brace',
+ value,
+ output: '(',
+ outputIndex: state.output.length,
+ tokensIndex: state.tokens.length
+ };
-const BUFFER = Symbol('buffer');
-const TYPE = Symbol('type');
+ braces.push(open);
+ push(open);
+ continue;
+ }
-class Blob {
- constructor() {
- this[TYPE] = '';
+ if (value === '}') {
+ const brace = braces[braces.length - 1];
- const blobParts = arguments[0];
- const options = arguments[1];
+ if (opts.nobrace === true || !brace) {
+ push({ type: 'text', value, output: value });
+ continue;
+ }
- const buffers = [];
- let size = 0;
+ let output = ')';
- if (blobParts) {
- const a = blobParts;
- const length = Number(a.length);
- for (let i = 0; i < length; i++) {
- const element = a[i];
- let buffer;
- if (element instanceof Buffer) {
- buffer = element;
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(element);
- } else if (element instanceof Blob) {
- buffer = element[BUFFER];
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element));
- }
- size += buffer.length;
- buffers.push(buffer);
- }
- }
+ if (brace.dots === true) {
+ const arr = tokens.slice();
+ const range = [];
- this[BUFFER] = Buffer.concat(buffers);
+ for (let i = arr.length - 1; i >= 0; i--) {
+ tokens.pop();
+ if (arr[i].type === 'brace') {
+ break;
+ }
+ if (arr[i].type !== 'dots') {
+ range.unshift(arr[i].value);
+ }
+ }
- let type = options && options.type !== undefined && String(options.type).toLowerCase();
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type;
- }
- }
- get size() {
- return this[BUFFER].length;
- }
- get type() {
- return this[TYPE];
- }
- text() {
- return Promise.resolve(this[BUFFER].toString());
- }
- arrayBuffer() {
- const buf = this[BUFFER];
- const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- return Promise.resolve(ab);
- }
- stream() {
- const readable = new Readable();
- readable._read = function () {};
- readable.push(this[BUFFER]);
- readable.push(null);
- return readable;
- }
- toString() {
- return '[object Blob]';
- }
- slice() {
- const size = this.size;
+ output = expandRange(range, opts);
+ state.backtrack = true;
+ }
- const start = arguments[0];
- const end = arguments[1];
- let relativeStart, relativeEnd;
- if (start === undefined) {
- relativeStart = 0;
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0);
- } else {
- relativeStart = Math.min(start, size);
- }
- if (end === undefined) {
- relativeEnd = size;
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0);
- } else {
- relativeEnd = Math.min(end, size);
- }
- const span = Math.max(relativeEnd - relativeStart, 0);
+ if (brace.comma !== true && brace.dots !== true) {
+ const out = state.output.slice(0, brace.outputIndex);
+ const toks = state.tokens.slice(brace.tokensIndex);
+ brace.value = brace.output = '\\{';
+ value = output = '\\}';
+ state.output = out;
+ for (const t of toks) {
+ state.output += (t.output || t.value);
+ }
+ }
- const buffer = this[BUFFER];
- const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
- const blob = new Blob([], { type: arguments[2] });
- blob[BUFFER] = slicedBuffer;
- return blob;
- }
-}
+ push({ type: 'brace', value, output });
+ decrement('braces');
+ braces.pop();
+ continue;
+ }
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-});
+ /**
+ * Pipes
+ */
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
-});
+ if (value === '|') {
+ if (extglobs.length > 0) {
+ extglobs[extglobs.length - 1].conditions++;
+ }
+ push({ type: 'text', value });
+ continue;
+ }
-/**
- * fetch-error.js
- *
- * FetchError interface for operational errors
- */
+ /**
+ * Commas
+ */
-/**
- * Create FetchError instance
- *
- * @param String message Error message for human
- * @param String type Error type for machine
- * @param String systemError For Node.js system error
- * @return FetchError
- */
-function FetchError(message, type, systemError) {
- Error.call(this, message);
+ if (value === ',') {
+ let output = value;
- this.message = message;
- this.type = type;
+ const brace = braces[braces.length - 1];
+ if (brace && stack[stack.length - 1] === 'braces') {
+ brace.comma = true;
+ output = '|';
+ }
- // when err.type is `system`, err.code contains system error code
- if (systemError) {
- this.code = this.errno = systemError.code;
- }
+ push({ type: 'comma', value, output });
+ continue;
+ }
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
+ /**
+ * Slashes
+ */
-FetchError.prototype = Object.create(Error.prototype);
-FetchError.prototype.constructor = FetchError;
-FetchError.prototype.name = 'FetchError';
+ if (value === '/') {
+ // if the beginning of the glob is "./", advance the start
+ // to the current index, and don't add the "./" characters
+ // to the state. This greatly simplifies lookbehinds when
+ // checking for BOS characters like "!" and "." (not "./")
+ if (prev.type === 'dot' && state.index === state.start + 1) {
+ state.start = state.index + 1;
+ state.consumed = '';
+ state.output = '';
+ tokens.pop();
+ prev = bos; // reset "prev" to the first token
+ continue;
+ }
-let convert;
-try {
- convert = __webpack_require__(2877).convert;
-} catch (e) {}
+ push({ type: 'slash', value, output: SLASH_LITERAL });
+ continue;
+ }
-const INTERNALS = Symbol('Body internals');
+ /**
+ * Dots
+ */
-// fix an issue where "PassThrough" isn't a named export for node <10
-const PassThrough = Stream.PassThrough;
+ if (value === '.') {
+ if (state.braces > 0 && prev.type === 'dot') {
+ if (prev.value === '.') prev.output = DOT_LITERAL;
+ const brace = braces[braces.length - 1];
+ prev.type = 'dots';
+ prev.output += value;
+ prev.value += value;
+ brace.dots = true;
+ continue;
+ }
-/**
- * Body mixin
- *
- * Ref: https://fetch.spec.whatwg.org/#body
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-function Body(body) {
- var _this = this;
+ if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
+ push({ type: 'text', value, output: DOT_LITERAL });
+ continue;
+ }
- var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
- _ref$size = _ref.size;
+ push({ type: 'dot', value, output: DOT_LITERAL });
+ continue;
+ }
- let size = _ref$size === undefined ? 0 : _ref$size;
- var _ref$timeout = _ref.timeout;
- let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+ /**
+ * Question marks
+ */
- if (body == null) {
- // body is undefined or null
- body = null;
- } else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
- body = Buffer.from(body.toString());
- } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
- body = Buffer.from(body);
- } else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
- body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
- } else if (body instanceof Stream) ; else {
- // none of the above
- // coerce to string then buffer
- body = Buffer.from(String(body));
- }
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null
- };
- this.size = size;
- this.timeout = timeout;
+ if (value === '?') {
+ const isGroup = prev && prev.value === '(';
+ if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+ extglobOpen('qmark', value);
+ continue;
+ }
+
+ if (prev && prev.type === 'paren') {
+ const next = peek();
+ let output = value;
+
+ if (next === '<' && !utils.supportsLookbehinds()) {
+ throw new Error('Node.js v10 or higher is required for regex lookbehinds');
+ }
- if (body instanceof Stream) {
- body.on('error', function (err) {
- const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
- _this[INTERNALS].error = error;
- });
- }
-}
+ if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
+ output = `\\${value}`;
+ }
-Body.prototype = {
- get body() {
- return this[INTERNALS].body;
- },
+ push({ type: 'text', value, output });
+ continue;
+ }
- get bodyUsed() {
- return this[INTERNALS].disturbed;
- },
+ if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
+ push({ type: 'qmark', value, output: QMARK_NO_DOT });
+ continue;
+ }
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- arrayBuffer() {
- return consumeBody.call(this).then(function (buf) {
- return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- });
- },
+ push({ type: 'qmark', value, output: QMARK });
+ continue;
+ }
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- blob() {
- let ct = this.headers && this.headers.get('content-type') || '';
- return consumeBody.call(this).then(function (buf) {
- return Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }), {
- [BUFFER]: buf
- });
- });
- },
+ /**
+ * Exclamation
+ */
- /**
- * Decode response as json
- *
- * @return Promise
- */
- json() {
- var _this2 = this;
+ if (value === '!') {
+ if (opts.noextglob !== true && peek() === '(') {
+ if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
+ extglobOpen('negate', value);
+ continue;
+ }
+ }
- return consumeBody.call(this).then(function (buffer) {
- try {
- return JSON.parse(buffer.toString());
- } catch (err) {
- return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
- }
- });
- },
+ if (opts.nonegate !== true && state.index === 0) {
+ negate();
+ continue;
+ }
+ }
- /**
- * Decode response as text
- *
- * @return Promise
- */
- text() {
- return consumeBody.call(this).then(function (buffer) {
- return buffer.toString();
- });
- },
+ /**
+ * Plus
+ */
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer() {
- return consumeBody.call(this);
- },
+ if (value === '+') {
+ if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+ extglobOpen('plus', value);
+ continue;
+ }
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted() {
- var _this3 = this;
+ if ((prev && prev.value === '(') || opts.regex === false) {
+ push({ type: 'plus', value, output: PLUS_LITERAL });
+ continue;
+ }
- return consumeBody.call(this).then(function (buffer) {
- return convertBody(buffer, _this3.headers);
- });
- }
-};
+ if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
+ push({ type: 'plus', value });
+ continue;
+ }
-// In browsers, all properties are enumerable.
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true }
-});
+ push({ type: 'plus', value: PLUS_LITERAL });
+ continue;
+ }
-Body.mixIn = function (proto) {
- for (const name of Object.getOwnPropertyNames(Body.prototype)) {
- // istanbul ignore else: future proof
- if (!(name in proto)) {
- const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
- Object.defineProperty(proto, name, desc);
- }
- }
-};
+ /**
+ * Plain text
+ */
-/**
- * Consume and convert an entire Body to a Buffer.
- *
- * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
- *
- * @return Promise
- */
-function consumeBody() {
- var _this4 = this;
+ if (value === '@') {
+ if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+ push({ type: 'at', extglob: true, value, output: '' });
+ continue;
+ }
- if (this[INTERNALS].disturbed) {
- return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
- }
+ push({ type: 'text', value });
+ continue;
+ }
- this[INTERNALS].disturbed = true;
+ /**
+ * Plain text
+ */
- if (this[INTERNALS].error) {
- return Body.Promise.reject(this[INTERNALS].error);
- }
+ if (value !== '*') {
+ if (value === '$' || value === '^') {
+ value = `\\${value}`;
+ }
- let body = this.body;
+ const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
+ if (match) {
+ value += match[0];
+ state.index += match[0].length;
+ }
- // body is null
- if (body === null) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
+ push({ type: 'text', value });
+ continue;
+ }
- // body is blob
- if (isBlob(body)) {
- body = body.stream();
- }
+ /**
+ * Stars
+ */
- // body is buffer
- if (Buffer.isBuffer(body)) {
- return Body.Promise.resolve(body);
- }
+ if (prev && (prev.type === 'globstar' || prev.star === true)) {
+ prev.type = 'star';
+ prev.star = true;
+ prev.value += value;
+ prev.output = star;
+ state.backtrack = true;
+ state.globstar = true;
+ consume(value);
+ continue;
+ }
- // istanbul ignore if: should never happen
- if (!(body instanceof Stream)) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
+ let rest = remaining();
+ if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
+ extglobOpen('star', value);
+ continue;
+ }
- // body is stream
- // get ready to actually consume the body
- let accum = [];
- let accumBytes = 0;
- let abort = false;
+ if (prev.type === 'star') {
+ if (opts.noglobstar === true) {
+ consume(value);
+ continue;
+ }
- return new Body.Promise(function (resolve, reject) {
- let resTimeout;
+ const prior = prev.prev;
+ const before = prior.prev;
+ const isStart = prior.type === 'slash' || prior.type === 'bos';
+ const afterStar = before && (before.type === 'star' || before.type === 'globstar');
- // allow timeout on slow response body
- if (_this4.timeout) {
- resTimeout = setTimeout(function () {
- abort = true;
- reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
- }, _this4.timeout);
- }
+ if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
+ push({ type: 'star', value, output: '' });
+ continue;
+ }
- // handle stream errors
- body.on('error', function (err) {
- if (err.name === 'AbortError') {
- // if the request was aborted, reject with this Error
- abort = true;
- reject(err);
- } else {
- // other errors, such as incorrect content-encoding
- reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
+ const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
+ const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
+ if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
+ push({ type: 'star', value, output: '' });
+ continue;
+ }
- body.on('data', function (chunk) {
- if (abort || chunk === null) {
- return;
- }
+ // strip consecutive `/**/`
+ while (rest.slice(0, 3) === '/**') {
+ const after = input[state.index + 4];
+ if (after && after !== '/') {
+ break;
+ }
+ rest = rest.slice(3);
+ consume('/**', 3);
+ }
- if (_this4.size && accumBytes + chunk.length > _this4.size) {
- abort = true;
- reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
- return;
- }
+ if (prior.type === 'bos' && eos()) {
+ prev.type = 'globstar';
+ prev.value += value;
+ prev.output = globstar(opts);
+ state.output = prev.output;
+ state.globstar = true;
+ consume(value);
+ continue;
+ }
- accumBytes += chunk.length;
- accum.push(chunk);
- });
+ if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
+ state.output = state.output.slice(0, -(prior.output + prev.output).length);
+ prior.output = `(?:${prior.output}`;
- body.on('end', function () {
- if (abort) {
- return;
- }
+ prev.type = 'globstar';
+ prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
+ prev.value += value;
+ state.globstar = true;
+ state.output += prior.output + prev.output;
+ consume(value);
+ continue;
+ }
- clearTimeout(resTimeout);
+ if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
+ const end = rest[1] !== void 0 ? '|$' : '';
- try {
- resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
- });
-}
+ state.output = state.output.slice(0, -(prior.output + prev.output).length);
+ prior.output = `(?:${prior.output}`;
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
+ prev.type = 'globstar';
+ prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
+ prev.value += value;
- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
+ state.output += prior.output + prev.output;
+ state.globstar = true;
+
+ consume(value + advance());
+
+ push({ type: 'slash', value: '/', output: '' });
+ continue;
+ }
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }
+ if (prior.type === 'bos' && rest[0] === '/') {
+ prev.type = 'globstar';
+ prev.value += value;
+ prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
+ state.output = prev.output;
+ state.globstar = true;
+ consume(value + advance());
+ push({ type: 'slash', value: '/', output: '' });
+ continue;
+ }
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
+ // remove single star from output
+ state.output = state.output.slice(0, -prev.output.length);
- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
+ while (state.brackets > 0) {
+ if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
+ state.output = utils.escapeLast(state.output, '[');
+ decrement('brackets');
+ }
- // don't allow cloning a used body
- if (instance.bodyUsed) {
- throw new Error('cannot clone body after it is used');
- }
+ while (state.parens > 0) {
+ if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
+ state.output = utils.escapeLast(state.output, '(');
+ decrement('parens');
+ }
- // check that body is a stream and not form-data object
- // note: we can't clone the form-data object without having it as a dependency
- if (body instanceof Stream && typeof body.getBoundary !== 'function') {
- // tee instance body
- p1 = new PassThrough();
- p2 = new PassThrough();
- body.pipe(p1);
- body.pipe(p2);
- // set instance body to teed body and return the other teed body
- instance[INTERNALS].body = p1;
- body = p2;
- }
+ while (state.braces > 0) {
+ if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
+ state.output = utils.escapeLast(state.output, '{');
+ decrement('braces');
+ }
- return body;
-}
+ if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
+ push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
+ }
-/**
- * Performs the operation "extract a `Content-Type` value from |object|" as
- * specified in the specification:
- * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
- *
- * This function assumes that instance.body is present.
- *
- * @param Mixed instance Any options.body input
- */
-function extractContentType(body) {
- if (body === null) {
- // body is null
- return null;
- } else if (typeof body === 'string') {
- // body is string
- return 'text/plain;charset=UTF-8';
- } else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
- return 'application/x-www-form-urlencoded;charset=UTF-8';
- } else if (isBlob(body)) {
- // body is blob
- return body.type || null;
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- return null;
- } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
- return null;
- } else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
- return null;
- } else if (typeof body.getBoundary === 'function') {
- // detect form data input from form-data module
- return `multipart/form-data;boundary=${body.getBoundary()}`;
- } else if (body instanceof Stream) {
- // body is stream
- // can't really do much about this
- return null;
- } else {
- // Body constructor defaults other things to string
- return 'text/plain;charset=UTF-8';
- }
-}
+ // rebuild the output if we had to backtrack at any point
+ if (state.backtrack === true) {
+ state.output = '';
-/**
- * The Fetch Standard treats this as if "total bytes" is a property on the body.
- * For us, we have to explicitly get it with a function.
- *
- * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
- *
- * @param Body instance Instance of Body
- * @return Number? Number of bytes, or null if not possible
- */
-function getTotalBytes(instance) {
- const body = instance.body;
+ for (const token of state.tokens) {
+ state.output += token.output != null ? token.output : token.value;
+ if (token.suffix) {
+ state.output += token.suffix;
+ }
+ }
+ }
- if (body === null) {
- // body is null
- return 0;
- } else if (isBlob(body)) {
- return body.size;
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- return body.length;
- } else if (body && typeof body.getLengthSync === 'function') {
- // detect form data input from form-data module
- if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
- body.hasKnownLength && body.hasKnownLength()) {
- // 2.x
- return body.getLengthSync();
- }
- return null;
- } else {
- // body is stream
- return null;
- }
-}
+ return state;
+};
/**
- * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
- *
- * @param Body instance Instance of Body
- * @return Void
+ * Fast paths for creating regular expressions for common glob patterns.
+ * This can significantly speed up processing and has very little downside
+ * impact when none of the fast paths match.
*/
-function writeToStream(dest, instance) {
- const body = instance.body;
+parse.fastpaths = (input, options) => {
+ const opts = { ...options };
+ const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ const len = input.length;
+ if (len > max) {
+ throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
+ }
- if (body === null) {
- // body is null
- dest.end();
- } else if (isBlob(body)) {
- body.stream().pipe(dest);
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- dest.write(body);
- dest.end();
- } else {
- // body is stream
- body.pipe(dest);
- }
-}
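+ // replace known-equivalent patterns (e.g. redundant globstar runs) with their canonical form before trying the fast paths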
+ input = REPLACEMENTS[input] || input;
+ const win32 = utils.isWindows(options);
+
+ // create constants based on platform, for windows or posix
+ const {
+ DOT_LITERAL,
+ SLASH_LITERAL,
+ ONE_CHAR,
+ DOTS_SLASH,
+ NO_DOT,
+ NO_DOTS,
+ NO_DOTS_SLASH,
+ STAR,
+ START_ANCHOR
+ } = constants.globChars(win32);
+
+ const nodot = opts.dot ? NO_DOTS : NO_DOT;
+ const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
+ const capture = opts.capture ? '' : '?:';
+ const state = { negated: false, prefix: '' };
+ let star = opts.bash === true ? '.*?' : STAR;
+
+ if (opts.capture) {
+ star = `(${star})`;
+ }
-// expose Promise
-Body.Promise = global.Promise;
+ const globstar = (opts) => {
+ if (opts.noglobstar === true) return star;
+ return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
+ };
-/**
- * headers.js
- *
- * Headers class offers convenient helpers
- */
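+ // translate the most common glob shapes ('*', '**', '*/*', '*.ext', ...) directly into regex source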
+ const create = str => {
+ switch (str) {
+ case '*':
+ return `${nodot}${ONE_CHAR}${star}`;
-const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
-const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+ case '.*':
+ return `${DOT_LITERAL}${ONE_CHAR}${star}`;
-function validateName(name) {
- name = `${name}`;
- if (invalidTokenRegex.test(name) || name === '') {
- throw new TypeError(`${name} is not a legal HTTP header name`);
- }
-}
+ case '*.*':
+ return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
-function validateValue(value) {
- value = `${value}`;
- if (invalidHeaderCharRegex.test(value)) {
- throw new TypeError(`${value} is not a legal HTTP header value`);
- }
-}
+ case '*/*':
+ return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
-/**
- * Find the key in the map object given a header name.
- *
- * Returns undefined if not found.
- *
- * @param String name Header name
- * @return String|Undefined
- */
-function find(map, name) {
- name = name.toLowerCase();
- for (const key in map) {
- if (key.toLowerCase() === name) {
- return key;
- }
- }
- return undefined;
-}
+ case '**':
+ return nodot + globstar(opts);
-const MAP = Symbol('map');
-class Headers {
- /**
- * Headers class
- *
- * @param Object headers Response headers
- * @return Void
- */
- constructor() {
- let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+ case '**/*':
+ return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
- this[MAP] = Object.create(null);
+ case '**/*.*':
+ return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);
+ case '**/.*':
+ return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }
+ default: {
+ const match = /^(.*?)\.(\w+)$/.exec(str);
+ if (!match) return;
- return;
- }
+ const source = create(match[1]);
+ if (!source) return;
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
+ return source + DOT_LITERAL + match[2];
+ }
+ }
+ };
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
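+ // strip a leading './' and look the remaining pattern up in the fast-path table (undefined when no fast path applies)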
+ const output = utils.removePrefix(input, state);
+ let source = create(output);
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }
+ if (source && opts.strictSlashes !== true) {
+ source += `${SLASH_LITERAL}?`;
+ }
- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }
+ return source;
+};
- return this[MAP][key].join(', ');
- }
+module.exports = parse;
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
- let pairs = getHeaders(this);
- let i = 0;
- while (i < pairs.length) {
- var _pairs$i = pairs[i];
- const name = _pairs$i[0],
- value = _pairs$i[1];
+/***/ }),
- callback.call(thisArg, value, name, this);
- pairs = getHeaders(this);
- i++;
- }
- }
+/***/ 3322:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- this[MAP][key !== undefined ? key : name] = [value];
- }
+"use strict";
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- this[MAP][key].push(value);
- } else {
- this[MAP][name] = [value];
- }
- }
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has(name) {
- name = `${name}`;
- validateName(name);
- return find(this[MAP], name) !== undefined;
- }
+const path = __webpack_require__(5622);
+const scan = __webpack_require__(2429);
+const parse = __webpack_require__(2139);
+const utils = __webpack_require__(479);
+const constants = __webpack_require__(6099);
+const isObject = val => val && typeof val === 'object' && !Array.isArray(val);
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- delete this[MAP][key];
- }
- }
+/**
+ * Creates a matcher function from one or more glob patterns. The
+ * returned function takes a string to match as its first argument,
+ * and returns true if the string is a match. The returned matcher
+ * function also takes a boolean as the second argument that, when true,
+ * returns an object with additional information.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch(glob[, options]);
+ *
+ * const isMatch = picomatch('*.!(*a)');
+ * console.log(isMatch('a.a')); //=> false
+ * console.log(isMatch('a.b')); //=> true
+ * ```
+ * @name picomatch
+ * @param {String|Array} `globs` One or more glob patterns.
+ * @param {Object=} `options`
+ * @return {Function=} Returns a matcher function.
+ * @api public
+ */
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw() {
- return this[MAP];
- }
+const picomatch = (glob, options, returnState = false) => {
+ if (Array.isArray(glob)) {
+ const fns = glob.map(input => picomatch(input, options, returnState));
+ const arrayMatcher = str => {
+ for (const isMatch of fns) {
+ const state = isMatch(str);
+ if (state) return state;
+ }
+ return false;
+ };
+ return arrayMatcher;
+ }
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys() {
- return createHeadersIterator(this, 'key');
- }
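+ // a pre-parsed state object (from picomatch.parse with tokens) may be passed instead of a glob string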
+ const isState = isObject(glob) && glob.tokens && glob.input;
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values() {
- return createHeadersIterator(this, 'value');
- }
+ if (glob === '' || (typeof glob !== 'string' && !isState)) {
+ throw new TypeError('Expected pattern to be a non-empty string');
+ }
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator]() {
- return createHeadersIterator(this, 'key+value');
- }
-}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+ const opts = options || {};
+ const posix = utils.isWindows(options);
+ const regex = isState
+ ? picomatch.compileRe(glob, options)
+ : picomatch.makeRe(glob, options, false, true);
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
-});
+ const state = regex.state;
+ delete regex.state;
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true }
-});
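+ // options.ignore accepts one or more globs; inputs matching an ignore pattern are reported as non-matches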
+ let isIgnored = () => false;
+ if (opts.ignore) {
+ const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
+ isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
+ }
-function getHeaders(headers) {
- let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
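+ // the matcher for a single pattern: run the compiled regex, consult ignore patterns, and fire onResult/onIgnore/onMatch hooks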
+ const matcher = (input, returnObject = false) => {
+ const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
+ const result = { glob, state, regex, posix, input, output, match, isMatch };
- const keys = Object.keys(headers[MAP]).sort();
- return keys.map(kind === 'key' ? function (k) {
- return k.toLowerCase();
- } : kind === 'value' ? function (k) {
- return headers[MAP][k].join(', ');
- } : function (k) {
- return [k.toLowerCase(), headers[MAP][k].join(', ')];
- });
-}
+ if (typeof opts.onResult === 'function') {
+ opts.onResult(result);
+ }
-const INTERNAL = Symbol('internal');
+ if (isMatch === false) {
+ result.isMatch = false;
+ return returnObject ? result : false;
+ }
-function createHeadersIterator(target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype);
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
- };
- return iterator;
-}
+ if (isIgnored(input)) {
+ if (typeof opts.onIgnore === 'function') {
+ opts.onIgnore(result);
+ }
+ result.isMatch = false;
+ return returnObject ? result : false;
+ }
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next() {
- // istanbul ignore if
- if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator');
- }
+ if (typeof opts.onMatch === 'function') {
+ opts.onMatch(result);
+ }
+ return returnObject ? result : true;
+ };
- var _INTERNAL = this[INTERNAL];
- const target = _INTERNAL.target,
- kind = _INTERNAL.kind,
- index = _INTERNAL.index;
+ if (returnState) {
+ matcher.state = state;
+ }
+
+ return matcher;
+};
+
+/**
+ * Test `input` with the given `regex`. This is used by the main
+ * `picomatch()` function to test the input string.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.test(input, regex[, options]);
+ *
+ * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
+ * // { isMatch: true, match: [ 'foo/bar', 'foo', 'bar' ], output: 'foo/bar' }
+ * ```
+ * @param {String} `input` String to test.
+ * @param {RegExp} `regex`
+ * @return {Object} Returns an object with matching info.
+ * @api public
+ */
- const values = getHeaders(target, kind);
- const len = values.length;
- if (index >= len) {
- return {
- value: undefined,
- done: true
- };
- }
+picomatch.test = (input, regex, options, { glob, posix } = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected input to be a string');
+ }
- this[INTERNAL].index = index + 1;
+ if (input === '') {
+ return { isMatch: false, output: '' };
+ }
- return {
- value: values[index],
- done: false
- };
- }
-}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+ const opts = options || {};
+ const format = opts.format || (posix ? utils.toPosixSlashes : null);
+ let match = input === glob;
+ let output = (match && format) ? format(input) : input;
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-});
+ if (match === false) {
+ output = format ? format(input) : input;
+ match = output === glob;
+ }
+
+ if (match === false || opts.capture === true) {
+ if (opts.matchBase === true || opts.basename === true) {
+ match = picomatch.matchBase(input, regex, options, posix);
+ } else {
+ match = regex.exec(output);
+ }
+ }
+
+ return { isMatch: Boolean(match), match, output };
+};
/**
- * Export the Headers object in a form that Node.js can consume.
+ * Match the basename of a filepath.
*
- * @param Headers headers
- * @return Object
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.matchBase(input, glob[, options]);
+ * console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
+ * ```
+ * @param {String} `input` String to test.
+ * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
+ * @return {Boolean}
+ * @api public
*/
-function exportNodeCompatibleHeaders(headers) {
- const obj = Object.assign({ __proto__: null }, headers[MAP]);
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host');
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0];
- }
- return obj;
-}
+picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => {
+ const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
+ return regex.test(path.basename(input));
+};
/**
- * Create a Headers object from an object of headers, ignoring those that do
- * not conform to HTTP grammar productions.
+ * Returns true if **any** of the given glob `patterns` match the specified `string`.
*
- * @param Object obj Object of headers
- * @return Headers
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.isMatch(string, patterns[, options]);
+ *
+ * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
+ * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
+ * ```
+ * @param {String|Array} str The string to test.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} [options] See available [options](#options).
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
*/
-function createHeadersLenient(obj) {
- const headers = new Headers();
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue;
- }
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue;
- }
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val];
- } else {
- headers[MAP][name].push(val);
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]];
- }
- }
- return headers;
-}
-const INTERNALS$1 = Symbol('Response internals');
+picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
-// fix an issue where "STATUS_CODES" aren't a named export for node <10
-const STATUS_CODES = http.STATUS_CODES;
+/**
+ * Parse a glob pattern to create the source string for a regular
+ * expression.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * const result = picomatch.parse(pattern[, options]);
+ * ```
+ * @param {String} `pattern`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties and output to be used as a regex source string.
+ * @api public
+ */
+
+picomatch.parse = (pattern, options) => {
+ if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));
+ return parse(pattern, { ...options, fastpaths: false });
+};
/**
- * Response class
+ * Scan a glob pattern to separate the pattern into segments.
*
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.scan(input[, options]);
+ *
+ * const result = picomatch.scan('!./foo/*.js');
+ * console.log(result);
+ * { prefix: '!./',
+ * input: '!./foo/*.js',
+ * start: 3,
+ * base: 'foo',
+ * glob: '*.js',
+ * isBrace: false,
+ * isBracket: false,
+ * isGlob: true,
+ * isExtglob: false,
+ * isGlobstar: false,
+ * negated: true }
+ * ```
+ * @param {String} `input` Glob pattern to scan.
+ * @param {Object} `options`
+ * @return {Object} Returns an object with the scanned `base`, `glob` and related flags.
+ * @api public
*/
-class Response {
- constructor() {
- let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
- let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
- Body.call(this, body, opts);
+picomatch.scan = (input, options) => scan(input, options);
- const status = opts.status || 200;
- const headers = new Headers(opts.headers);
+/**
+ * Create a regular expression from a parsed glob pattern.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * const state = picomatch.parse('*.js');
+ * // picomatch.compileRe(state[, options]);
+ *
+ * console.log(picomatch.compileRe(state));
+ * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
+ * ```
+ * @param {String} `state` The object returned from the `.parse` method.
+ * @param {Object} `options`
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
- if (body != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(body);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
+picomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => {
+ if (returnOutput === true) {
+ return parsed.output;
+ }
- this[INTERNALS$1] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter
- };
- }
+ const opts = options || {};
+ const prepend = opts.contains ? '' : '^';
+ const append = opts.contains ? '' : '$';
- get url() {
- return this[INTERNALS$1].url || '';
- }
+ let source = `${prepend}(?:${parsed.output})${append}`;
+ if (parsed && parsed.negated === true) {
+ source = `^(?!${source}).*$`;
+ }
- get status() {
- return this[INTERNALS$1].status;
- }
+ const regex = picomatch.toRegex(source, options);
+ if (returnState === true) {
+ regex.state = parsed;
+ }
- /**
- * Convenience property representing if the request ended normally
- */
- get ok() {
- return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
- }
+ return regex;
+};
- get redirected() {
- return this[INTERNALS$1].counter > 0;
- }
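+/**
+ * Create a regular expression from a glob pattern. Convenience wrapper that
+ * runs `.parse` (or a fast path) and `.compileRe` in one step.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * const re = picomatch.makeRe('*.js');
+ * console.log(re.test('foo.js')); //=> true
+ * console.log(re.test('foo.md')); //=> false
+ * ```
+ * @param {String} `input` A glob pattern to convert to regex.
+ * @param {Object} `options`
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
+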
+picomatch.makeRe = (input, options, returnOutput = false, returnState = false) => {
+ if (!input || typeof input !== 'string') {
+ throw new TypeError('Expected a non-empty string');
+ }
- get statusText() {
- return this[INTERNALS$1].statusText;
- }
+ const opts = options || {};
+ let parsed = { negated: false, fastpaths: true };
+ let prefix = '';
+ let output;
- get headers() {
- return this[INTERNALS$1].headers;
- }
+ if (input.startsWith('./')) {
+ input = input.slice(2);
+ prefix = parsed.prefix = './';
+ }
- /**
- * Clone this response
- *
- * @return Response
- */
- clone() {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected
- });
- }
-}
+ if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
+ output = parse.fastpaths(input, options);
+ }
-Body.mixIn(Response.prototype);
+ if (output === undefined) {
+ parsed = parse(input, options);
+ parsed.prefix = prefix + (parsed.prefix || '');
+ } else {
+ parsed.output = output;
+ }
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true }
-});
+ return picomatch.compileRe(parsed, options, returnOutput, returnState);
+};
-Object.defineProperty(Response.prototype, Symbol.toStringTag, {
- value: 'Response',
- writable: false,
- enumerable: false,
- configurable: true
-});
+/**
+ * Create a regular expression from the given regex source string.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.toRegex(source[, options]);
+ *
+ * const { output } = picomatch.parse('*.js');
+ * console.log(picomatch.toRegex(output));
+ * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
+ * ```
+ * @param {String} `source` Regular expression source string.
+ * @param {Object} `options`
+ * @return {RegExp}
+ * @api public
+ */
-const INTERNALS$2 = Symbol('Request internals');
+picomatch.toRegex = (source, options) => {
+ try {
+ const opts = options || {};
+ return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
+ } catch (err) {
+ if (options && options.debug === true) throw err;
+ return /$^/;
+ }
+};
-// fix an issue where "format", "parse" aren't a named export for node <10
-const parse_url = Url.parse;
-const format_url = Url.format;
+/**
+ * Picomatch constants.
+ * @return {Object}
+ */
-const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+picomatch.constants = constants;
/**
- * Check if a value is an instance of Request.
- *
- * @param Mixed input
- * @return Boolean
+ * Expose "picomatch"
*/
-function isRequest(input) {
- return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
-}
-function isAbortSignal(signal) {
- const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
- return !!(proto && proto.constructor.name === 'AbortSignal');
-}
+module.exports = picomatch;
+
+
+/***/ }),
+
+/***/ 2429:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+"use strict";
+
+
+const utils = __webpack_require__(479);
+const {
+ CHAR_ASTERISK, /* * */
+ CHAR_AT, /* @ */
+ CHAR_BACKWARD_SLASH, /* \ */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_EXCLAMATION_MARK, /* ! */
+ CHAR_FORWARD_SLASH, /* / */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_PLUS, /* + */
+ CHAR_QUESTION_MARK, /* ? */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_RIGHT_SQUARE_BRACKET /* ] */
+} = __webpack_require__(6099);
+
+const isPathSeparator = code => {
+ return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
+};
+
+const depth = token => {
+ if (token.isPrefix !== true) {
+ token.depth = token.isGlobstar ? Infinity : 1;
+ }
+};
/**
- * Request class
+ * Quickly scans a glob pattern and returns an object with a handful of
+ * useful properties, like `isGlob`, `base` (the leading non-glob, if it exists),
+ * `glob` (the actual pattern), and `negated` (true if the path starts with `!`).
*
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
+ * ```js
+ * const pm = require('picomatch');
+ * console.log(pm.scan('foo/bar/*.js'));
+ * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with tokens and regex source string.
+ * @api public
*/
-class Request {
- constructor(input) {
- let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
- let parsedURL;
+const scan = (input, options) => {
+ const opts = options || {};
+
+ const length = input.length - 1;
+ const scanToEnd = opts.parts === true || opts.scanToEnd === true;
+ const slashes = [];
+ const tokens = [];
+ const parts = [];
+
+ let str = input;
+ let index = -1;
+ let start = 0;
+ let lastIndex = 0;
+ let isBrace = false;
+ let isBracket = false;
+ let isGlob = false;
+ let isExtglob = false;
+ let isGlobstar = false;
+ let braceEscaped = false;
+ let backslashes = false;
+ let negated = false;
+ let finished = false;
+ let braces = 0;
+ let prev;
+ let code;
+ let token = { value: '', depth: 0, isGlob: false };
+
+ const eos = () => index >= length;
+ const peek = () => str.charCodeAt(index + 1);
+ const advance = () => {
+ prev = code;
+ return str.charCodeAt(++index);
+ };
- // normalize input
- if (!isRequest(input)) {
- if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
- // will fall into this branch also (since their `toString()` will return
- // `href` property anyway)
- parsedURL = parse_url(input.href);
- } else {
- // coerce input to a string before attempting to parse
- parsedURL = parse_url(`${input}`);
- }
- input = {};
- } else {
- parsedURL = parse_url(input.url);
- }
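+ // scan the pattern one character at a time, tracking braces, brackets, extglobs, negation and slash positions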
+ while (index < length) {
+ code = advance();
+ let next;
- let method = init.method || input.method || 'GET';
- method = method.toUpperCase();
+ if (code === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ code = advance();
- if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body');
- }
+ if (code === CHAR_LEFT_CURLY_BRACE) {
+ braceEscaped = true;
+ }
+ continue;
+ }
- let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
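+ // brace section: track nesting depth; a ',' or '..' range inside confirms it as a brace expansion (glob)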
+ if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
+ braces++;
- Body.call(this, inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0
- });
+ while (eos() !== true && (code = advance())) {
+ if (code === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ advance();
+ continue;
+ }
- const headers = new Headers(init.headers || input.headers || {});
+ if (code === CHAR_LEFT_CURLY_BRACE) {
+ braces++;
+ continue;
+ }
+
+ if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
+ isBrace = token.isBrace = true;
+ isGlob = token.isGlob = true;
+ finished = true;
+
+ if (scanToEnd === true) {
+ continue;
+ }
+
+ break;
+ }
- if (inputBody != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
+ if (braceEscaped !== true && code === CHAR_COMMA) {
+ isBrace = token.isBrace = true;
+ isGlob = token.isGlob = true;
+ finished = true;
- let signal = isRequest(input) ? input.signal : null;
- if ('signal' in init) signal = init.signal;
+ if (scanToEnd === true) {
+ continue;
+ }
- if (signal != null && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal to be an instanceof AbortSignal');
- }
+ break;
+ }
- this[INTERNALS$2] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal
- };
+ if (code === CHAR_RIGHT_CURLY_BRACE) {
+ braces--;
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
- this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
- this.counter = init.counter || input.counter || 0;
- this.agent = init.agent || input.agent;
- }
+ if (braces === 0) {
+ braceEscaped = false;
+ isBrace = token.isBrace = true;
+ finished = true;
+ break;
+ }
+ }
+ }
- get method() {
- return this[INTERNALS$2].method;
- }
+ if (scanToEnd === true) {
+ continue;
+ }
- get url() {
- return format_url(this[INTERNALS$2].parsedURL);
- }
+ break;
+ }
- get headers() {
- return this[INTERNALS$2].headers;
- }
+ if (code === CHAR_FORWARD_SLASH) {
+ slashes.push(index);
+ tokens.push(token);
+ token = { value: '', depth: 0, isGlob: false };
- get redirect() {
- return this[INTERNALS$2].redirect;
- }
+ if (finished === true) continue;
+ if (prev === CHAR_DOT && index === (start + 1)) {
+ start += 2;
+ continue;
+ }
- get signal() {
- return this[INTERNALS$2].signal;
- }
+ lastIndex = index + 1;
+ continue;
+ }
- /**
- * Clone this request
- *
- * @return Request
- */
- clone() {
- return new Request(this);
- }
-}
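+ // extglob detection: '+', '@', '*', '?' or '!' immediately followed by '(' (skipped when noext is set)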
+ if (opts.noext !== true) {
+ const isExtglobChar = code === CHAR_PLUS
+ || code === CHAR_AT
+ || code === CHAR_ASTERISK
+ || code === CHAR_QUESTION_MARK
+ || code === CHAR_EXCLAMATION_MARK;
-Body.mixIn(Request.prototype);
+ if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
+ isGlob = token.isGlob = true;
+ isExtglob = token.isExtglob = true;
+ finished = true;
-Object.defineProperty(Request.prototype, Symbol.toStringTag, {
- value: 'Request',
- writable: false,
- enumerable: false,
- configurable: true
-});
+ if (scanToEnd === true) {
+ while (eos() !== true && (code = advance())) {
+ if (code === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ code = advance();
+ continue;
+ }
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true }
-});
+ if (code === CHAR_RIGHT_PARENTHESES) {
+ isGlob = token.isGlob = true;
+ finished = true;
+ break;
+ }
+ }
+ continue;
+ }
+ break;
+ }
+ }
-/**
- * Convert a Request to Node.js http request options.
- *
- * @param Request A Request instance
- * @return Object The options object to be passed to http.request
- */
-function getNodeRequestOptions(request) {
- const parsedURL = request[INTERNALS$2].parsedURL;
- const headers = new Headers(request[INTERNALS$2].headers);
+ if (code === CHAR_ASTERISK) {
+ if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
+ isGlob = token.isGlob = true;
+ finished = true;
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*');
- }
+ if (scanToEnd === true) {
+ continue;
+ }
+ break;
+ }
- // Basic fetch
- if (!parsedURL.protocol || !parsedURL.hostname) {
- throw new TypeError('Only absolute URLs are supported');
- }
+ if (code === CHAR_QUESTION_MARK) {
+ isGlob = token.isGlob = true;
+ finished = true;
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported');
- }
+ if (scanToEnd === true) {
+ continue;
+ }
+ break;
+ }
- if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
- throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
- }
+ if (code === CHAR_LEFT_SQUARE_BRACKET) {
+ while (eos() !== true && (next = advance())) {
+ if (next === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ advance();
+ continue;
+ }
- // HTTP-network-or-cache fetch steps 2.4-2.7
- let contentLengthValue = null;
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
- contentLengthValue = '0';
- }
- if (request.body != null) {
- const totalBytes = getTotalBytes(request);
- if (typeof totalBytes === 'number') {
- contentLengthValue = String(totalBytes);
- }
- }
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue);
- }
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ isBracket = token.isBracket = true;
+ isGlob = token.isGlob = true;
+ finished = true;
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
- }
+ if (scanToEnd === true) {
+ continue;
+ }
+ break;
+ }
+ }
+ }
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate');
- }
+ if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
+ negated = token.negated = true;
+ start++;
+ continue;
+ }
- let agent = request.agent;
- if (typeof agent === 'function') {
- agent = agent(parsedURL);
- }
+ if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
+ isGlob = token.isGlob = true;
- if (!headers.has('Connection') && !agent) {
- headers.set('Connection', 'close');
- }
+ if (scanToEnd === true) {
+ while (eos() !== true && (code = advance())) {
+ if (code === CHAR_LEFT_PARENTHESES) {
+ backslashes = token.backslashes = true;
+ code = advance();
+ continue;
+ }
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
+ if (code === CHAR_RIGHT_PARENTHESES) {
+ finished = true;
+ break;
+ }
+ }
+ continue;
+ }
+ break;
+ }
- return Object.assign({}, parsedURL, {
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent
- });
-}
+ if (isGlob === true) {
+ finished = true;
-/**
- * abort-error.js
- *
- * AbortError interface for cancelled requests
- */
+ if (scanToEnd === true) {
+ continue;
+ }
-/**
- * Create AbortError instance
- *
- * @param String message Error message for human
- * @return AbortError
- */
-function AbortError(message) {
- Error.call(this, message);
+ break;
+ }
+ }
- this.type = 'aborted';
- this.message = message;
+ if (opts.noext === true) {
+ isExtglob = false;
+ isGlob = false;
+ }
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
+ let base = str;
+ let prefix = '';
+ let glob = '';
-AbortError.prototype = Object.create(Error.prototype);
-AbortError.prototype.constructor = AbortError;
-AbortError.prototype.name = 'AbortError';
+ if (start > 0) {
+ prefix = str.slice(0, start);
+ str = str.slice(start);
+ lastIndex -= start;
+ }
-// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
-const PassThrough$1 = Stream.PassThrough;
-const resolve_url = Url.resolve;
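+ // split the scanned string into the literal leading path ("base") and the glob portion ("glob")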
+ if (base && isGlob === true && lastIndex > 0) {
+ base = str.slice(0, lastIndex);
+ glob = str.slice(lastIndex);
+ } else if (isGlob === true) {
+ base = '';
+ glob = str;
+ } else {
+ base = str;
+ }
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-function fetch(url, opts) {
+ if (base && base !== '' && base !== '/' && base !== str) {
+ if (isPathSeparator(base.charCodeAt(base.length - 1))) {
+ base = base.slice(0, -1);
+ }
+ }
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
- }
+ if (opts.unescape === true) {
+ if (glob) glob = utils.removeBackslashes(glob);
- Body.Promise = fetch.Promise;
+ if (base && backslashes === true) {
+ base = utils.removeBackslashes(base);
+ }
+ }
- // wrap http.request into fetch
- return new fetch.Promise(function (resolve, reject) {
- // build request object
- const request = new Request(url, opts);
- const options = getNodeRequestOptions(request);
+ const state = {
+ prefix,
+ input,
+ start,
+ base,
+ glob,
+ isBrace,
+ isBracket,
+ isGlob,
+ isExtglob,
+ isGlobstar,
+ negated
+ };
- const send = (options.protocol === 'https:' ? https : http).request;
- const signal = request.signal;
+ if (opts.tokens === true) {
+ state.maxDepth = 0;
+ if (!isPathSeparator(code)) {
+ tokens.push(token);
+ }
+ state.tokens = tokens;
+ }
- let response = null;
+ if (opts.parts === true || opts.tokens === true) {
+ let prevIndex;
+
+ for (let idx = 0; idx < slashes.length; idx++) {
+ const n = prevIndex ? prevIndex + 1 : start;
+ const i = slashes[idx];
+ const value = input.slice(n, i);
+ if (opts.tokens) {
+ if (idx === 0 && start !== 0) {
+ tokens[idx].isPrefix = true;
+ tokens[idx].value = prefix;
+ } else {
+ tokens[idx].value = value;
+ }
+ depth(tokens[idx]);
+ state.maxDepth += tokens[idx].depth;
+ }
+ if (idx !== 0 || value !== '') {
+ parts.push(value);
+ }
+ prevIndex = i;
+ }
- const abort = function abort() {
- let error = new AbortError('The user aborted a request.');
- reject(error);
- if (request.body && request.body instanceof Stream.Readable) {
- request.body.destroy(error);
- }
- if (!response || !response.body) return;
- response.body.emit('error', error);
- };
+ if (prevIndex && prevIndex + 1 < input.length) {
+ const value = input.slice(prevIndex + 1);
+ parts.push(value);
- if (signal && signal.aborted) {
- abort();
- return;
- }
+ if (opts.tokens) {
+ tokens[tokens.length - 1].value = value;
+ depth(tokens[tokens.length - 1]);
+ state.maxDepth += tokens[tokens.length - 1].depth;
+ }
+ }
- const abortAndFinalize = function abortAndFinalize() {
- abort();
- finalize();
- };
+ state.slashes = slashes;
+ state.parts = parts;
+ }
- // send request
- const req = send(options);
- let reqTimeout;
+ return state;
+};
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize);
- }
+module.exports = scan;
- function finalize() {
- req.abort();
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- clearTimeout(reqTimeout);
- }
- if (request.timeout) {
- req.once('socket', function (socket) {
- reqTimeout = setTimeout(function () {
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
- finalize();
- }, request.timeout);
- });
- }
+/***/ }),
- req.on('error', function (err) {
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
- finalize();
- });
+/***/ 479:
+/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
- req.on('response', function (res) {
- clearTimeout(reqTimeout);
+"use strict";
- const headers = createHeadersLenient(res.headers);
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location');
+const path = __webpack_require__(5622);
+const win32 = process.platform === 'win32';
+const {
+ REGEX_BACKSLASH,
+ REGEX_REMOVE_BACKSLASH,
+ REGEX_SPECIAL_CHARS,
+ REGEX_SPECIAL_CHARS_GLOBAL
+} = __webpack_require__(6099);
+
+exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
+exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
+exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
+exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
+
+exports.removeBackslashes = str => {
+ return str.replace(REGEX_REMOVE_BACKSLASH, match => {
+ return match === '\\' ? '' : match;
+ });
+};
- // HTTP fetch step 5.3
- const locationURL = location === null ? null : resolve_url(request.url, location);
+exports.supportsLookbehinds = () => {
+ const segs = process.version.slice(1).split('.').map(Number);
+ if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
+ return true;
+ }
+ return false;
+};
- // HTTP fetch step 5.5
- switch (request.redirect) {
- case 'error':
- reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
- finalize();
- return;
- case 'manual':
- // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL);
- } catch (err) {
- // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
- reject(err);
- }
- }
- break;
- case 'follow':
- // HTTP-redirect fetch step 2
- if (locationURL === null) {
- break;
- }
+exports.isWindows = options => {
+ if (options && typeof options.windows === 'boolean') {
+ return options.windows;
+ }
+ return win32 === true || path.sep === '\\';
+};
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
- finalize();
- return;
- }
+exports.escapeLast = (input, char, lastIdx) => {
+ const idx = input.lastIndexOf(char, lastIdx);
+ if (idx === -1) return input;
+ if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
+ return `${input.slice(0, idx)}\\${input.slice(idx)}`;
+};
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout,
- size: request.size
- };
+exports.removePrefix = (input, state = {}) => {
+ let output = input;
+ if (output.startsWith('./')) {
+ output = output.slice(2);
+ state.prefix = './';
+ }
+ return output;
+};
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
- reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
- finalize();
- return;
- }
+exports.wrapOutput = (input, state = {}, options = {}) => {
+ const prepend = options.contains ? '' : '^';
+ const append = options.contains ? '' : '$';
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
- requestOpts.method = 'GET';
- requestOpts.body = undefined;
- requestOpts.headers.delete('content-length');
- }
+ let output = `${prepend}(?:${input})${append}`;
+ if (state.negated === true) {
+ output = `(?:^(?!${output}).*$)`;
+ }
+ return output;
+};
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)));
- finalize();
- return;
- }
- }
- // prepare response
- res.once('end', function () {
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- });
- let body = res.pipe(new PassThrough$1());
+/***/ }),
- const response_options = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter
- };
+/***/ 2113:
+/***/ ((module) => {
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding');
+"use strict";
- // HTTP-network fetch step 12.1.1.4: handle content codings
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
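+// reusify: a small object pool that recycles instances of Constructor through a
+// linked list, so hot code paths can avoid allocating a new object per call.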
+function reusify (Constructor) {
+ var head = new Constructor()
+ var tail = head
- // For Node v6+
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- };
+ function get () {
+ var current = head
- // for gzip
- if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
+ if (current.next) {
+ head = current.next
+ } else {
+ head = new Constructor()
+ tail = head
+ }
- // for deflate
- if (codings == 'deflate' || codings == 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough$1());
- raw.once('data', function (chunk) {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
- } else {
- body = body.pipe(zlib.createInflateRaw());
- }
- response = new Response(body, response_options);
- resolve(response);
- });
- return;
- }
+ current.next = null
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
+ return current
+ }
- // otherwise, use response as-is
- response = new Response(body, response_options);
- resolve(response);
- });
+ function release (obj) {
+ tail.next = obj
+ tail = obj
+ }
- writeToStream(req, request);
- });
+ return {
+ get: get,
+ release: release
+ }
}
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = function (code) {
- return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
-};
-
-// expose Promise
-fetch.Promise = global.Promise;
-module.exports = exports = fetch;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.default = exports;
-exports.Headers = Headers;
-exports.Request = Request;
-exports.Response = Response;
-exports.FetchError = FetchError;
+module.exports = reusify
/***/ }),
-/***/ 1223:
-/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+/***/ 5288:
+/***/ ((module) => {
-var wrappy = __webpack_require__(2940)
-module.exports = wrappy(once)
-module.exports.strict = wrappy(onceStrict)
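+// run-parallel: run an array (or object map) of async tasks concurrently and collect
+// their results; cb fires once, with the first error or with all results.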
+module.exports = runParallel
-once.proto = once(function () {
- Object.defineProperty(Function.prototype, 'once', {
- value: function () {
- return once(this)
- },
- configurable: true
- })
+function runParallel (tasks, cb) {
+ var results, pending, keys
+ var isSync = true
- Object.defineProperty(Function.prototype, 'onceStrict', {
- value: function () {
- return onceStrict(this)
- },
- configurable: true
- })
-})
+ if (Array.isArray(tasks)) {
+ results = []
+ pending = tasks.length
+ } else {
+ keys = Object.keys(tasks)
+ results = {}
+ pending = keys.length
+ }
-function once (fn) {
- var f = function () {
- if (f.called) return f.value
- f.called = true
- return f.value = fn.apply(this, arguments)
+ function done (err) {
+ function end () {
+ if (cb) cb(err, results)
+ cb = null
+ }
+ if (isSync) process.nextTick(end)
+ else end()
}
- f.called = false
- return f
-}
-function onceStrict (fn) {
- var f = function () {
- if (f.called)
- throw new Error(f.onceError)
- f.called = true
- return f.value = fn.apply(this, arguments)
+ function each (i, err, result) {
+ results[i] = result
+ if (--pending === 0 || err) {
+ done(err)
+ }
}
- var name = fn.name || 'Function wrapped with `once`'
- f.onceError = name + " shouldn't be called more than once"
- f.called = false
- return f
+
+ if (!pending) {
+ // empty
+ done(null)
+ } else if (keys) {
+ // object
+ keys.forEach(function (key) {
+ tasks[key](function (err, result) { each(key, err, result) })
+ })
+ } else {
+ // array
+ tasks.forEach(function (task, i) {
+ task(function (err, result) { each(i, err, result) })
+ })
+ }
+
+ isSync = false
}
diff --git a/dist/index.js.map b/dist/index.js.map
index 4922621..57b0599 100644
--- a/dist/index.js.map
+++ b/dist/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","sources":["../webpack://test-check/./lib/main.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-parser.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-types.js","../webpack://test-check/./lib/parsers/dotnet-trx/dotnet-trx-parser.js","../webpack://test-check/./lib/parsers/jest-junit/jest-junit-parser.js","../webpack://test-check/./lib/report/get-report.js","../webpack://test-check/./lib/report/test-results.js","../webpack://test-check/./lib/utils/exec.js","../webpack://test-check/./lib/utils/file-utils.js","../webpack://test-check/./lib/utils/git.js","../webpack://test-check/./lib/utils/github-utils.js","../webpack://test-check/./lib/utils/markdown-utils.js","../webpack://test-check/./lib/utils/slugger.js","../webpack://test-check/./lib/utils/xml-utils.js","../webpack://test-check/./node_modules/@actions/core/lib/command.js","../webpack://test-check/./node_modules/@actions/core/lib/core.js","../webpack://test-check/./node_modules/@actions/core/lib/file-command.js","../webpack://test-check/./node_modules/@actions/core/lib/utils.js","../webpack://test-check/./node_modules/@actions/exec/lib/exec.js","../webpack://test-check/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://test-check/./node_modules/@actions/github/lib/context.js","../webpack://test-check/./node_modules/@actions/github/lib/github.js","../webpack://test-check/./node_modules/@actions/github/lib/internal/utils.js","../webpack://test-check/./node_modules/@actions/github/lib/utils.js","../webpack://test-check/./node_modules/@actions/http-client/index.js","../webpack://test-check/./node_modules/@actions/http-client/proxy.js","../webpack://test-check/./node_modules/@actions/io/lib/io-util.js","../webpack://test-check/./node_modules/@actions/io/lib/io.js","../webpack://test-check/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/core/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/before-after-hook/index.js","../webpack://test-check/./node_modules/before-after-hook/lib/add.js","../webpack://test-check/./node_modules/before-after-hook/lib/register.js","../webpack://test-check/./node_modules/before-after-hook/lib/remove.js","../webpack://test-check/./node_modules/deprecation/dist-node/index.js","../webpack://test-check/./node_modules/node-fetch/lib/index.js","../webpack://test-check/./node_modules/once/once.js","../webpack://test-check/./node_modules/sax/lib/sax.js","../webpack://test-check/./node_modules/tunnel/index.js","../webpack://test-check/./node_modules/tunnel/lib/tunnel.js","../webpack://test-check/./node_modules/universal-user-agent/dist-node/index.js","../webpack://test-check/./node_modules/wrappy/wrappy.js","../webpack://test-check/./node_modules/xml2js/lib/bom.js","../webpack:/
/test-check/./node_modules/xml2js/lib/builder.js","../webpack://test-check/./node_modules/xml2js/lib/defaults.js","../webpack://test-check/./node_modules/xml2js/lib/parser.js","../webpack://test-check/./node_modules/xml2js/lib/processors.js","../webpack://test-check/./node_modules/xml2js/lib/xml2js.js","../webpack://test-check/./node_modules/xmlbuilder/lib/DocumentPosition.js","../webpack://test-check/./node_modules/xmlbuilder/lib/NodeType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/Utility.js","../webpack://test-check/./node_modules/xmlbuilder/lib/WriterState.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLAttribute.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCharacterData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLComment.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMConfiguration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMImplementation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMStringList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDAttList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDEntity.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDNotation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDeclaration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocument.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocumentCB.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDummy.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNamedNodeMap.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNode.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNodeList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLProcessingInstruction.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLRaw.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStreamWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringifier.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLText.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLWriterBase.js","../webpack://test-check/./node_modules/xmlbuilder/lib/index.js","../webpack://test-check/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://test-check/external \"assert\"","../webpack://test-check/external \"child_process\"","../webpack://test-check/external \"events\"","../webpack://test-check/external \"fs\"","../webpack://test-check/external \"http\"","../webpack://test-check/external \"https\"","../webpack://test-check/external \"net\"","../webpack://test-check/external \"os\"","../webpack://test-check/external \"path\"","../webpack://test-check/external \"stream\"","../webpack://test-check/external \"string_decoder\"","../webpack://test-check/external \"timers\"","../webpack://test-check/external \"tls\"","../webpack://test-check/external \"url\"","../webpack://test-check/external \"util\"","../webpack://test-check/external 
\"zlib\"","../webpack://test-check/webpack/bootstrap","../webpack://test-check/webpack/runtime/compat","../webpack://test-check/webpack/startup"],"sourcesContent":["\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst github = __importStar(require(\"@actions/github\"));\r\nconst dart_json_parser_1 = require(\"./parsers/dart-json/dart-json-parser\");\r\nconst dotnet_trx_parser_1 = require(\"./parsers/dotnet-trx/dotnet-trx-parser\");\r\nconst jest_junit_parser_1 = require(\"./parsers/jest-junit/jest-junit-parser\");\r\nconst file_utils_1 = require(\"./utils/file-utils\");\r\nconst git_1 = require(\"./utils/git\");\r\nconst github_utils_1 = require(\"./utils/github-utils\");\r\nasync function run() {\r\n try {\r\n await main();\r\n }\r\n catch (error) {\r\n core.setFailed(error.message);\r\n }\r\n}\r\nasync function main() {\r\n const annotations = core.getInput('annotations', { required: true }) === 'true';\r\n const failOnError = core.getInput('fail-on-error', { required: true }) === 'true';\r\n const name = core.getInput('name', { required: true });\r\n const path = core.getInput('path', { required: true });\r\n const reporter = core.getInput('reporter', { required: true });\r\n const token = core.getInput('token', { required: true });\r\n const workDirInput = core.getInput('working-directory', { required: false });\r\n if (workDirInput) {\r\n process.chdir(workDirInput);\r\n }\r\n const workDir = file_utils_1.normalizeDirPath(process.cwd(), true);\r\n const octokit = github.getOctokit(token);\r\n const sha = github_utils_1.getCheckRunSha();\r\n // We won't need tracked files if we are not going to create annotations\r\n const trackedFiles = annotations ? await git_1.listFiles() : [];\r\n const opts = {\r\n name,\r\n annotations,\r\n trackedFiles,\r\n workDir\r\n };\r\n const parser = getParser(reporter);\r\n const content = file_utils_1.getFileContent(path);\r\n const result = await parser(content, opts);\r\n const conclusion = result.success ? 
'success' : 'failure';\r\n await octokit.checks.create({\r\n head_sha: sha,\r\n name,\r\n conclusion,\r\n status: 'completed',\r\n output: result.output,\r\n ...github.context.repo\r\n });\r\n core.setOutput('conclusion', conclusion);\r\n if (failOnError && !result.success) {\r\n core.setFailed(`Failed test has been found and 'fail-on-error' option is set to ${failOnError}.`);\r\n }\r\n}\r\nfunction getParser(reporter) {\r\n switch (reporter) {\r\n case 'dart-json':\r\n return dart_json_parser_1.parseDartJson;\r\n case 'dotnet-trx':\r\n return dotnet_trx_parser_1.parseDotnetTrx;\r\n case 'flutter-machine':\r\n return dart_json_parser_1.parseDartJson;\r\n case 'jest-junit':\r\n return jest_junit_parser_1.parseJestJunit;\r\n default:\r\n throw new Error(`Input parameter 'reporter' is set to invalid value '${reporter}'`);\r\n }\r\n}\r\nrun();\r\n","\"use strict\";\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.parseDartJson = void 0;\r\nconst get_report_1 = __importDefault(require(\"../../report/get-report\"));\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst markdown_utils_1 = require(\"../../utils/markdown-utils\");\r\nconst dart_json_types_1 = require(\"./dart-json-types\");\r\nconst test_results_1 = require(\"../../report/test-results\");\r\nclass TestRun {\r\n constructor(suites, success, time) {\r\n this.suites = suites;\r\n this.success = success;\r\n this.time = time;\r\n }\r\n}\r\nclass TestSuite {\r\n constructor(suite) {\r\n this.suite = suite;\r\n this.groups = {};\r\n }\r\n}\r\nclass TestGroup {\r\n constructor(group) {\r\n this.group = group;\r\n this.tests = [];\r\n }\r\n}\r\nclass TestCase {\r\n constructor(testStart) {\r\n this.testStart = testStart;\r\n this.groupId = testStart.test.groupIDs[testStart.test.groupIDs.length - 1];\r\n }\r\n get result() {\r\n var _a, _b, _c, _d;\r\n if ((_a = this.testDone) === null || _a === void 0 ? void 0 : _a.skipped) {\r\n return 'skipped';\r\n }\r\n if (((_b = this.testDone) === null || _b === void 0 ? void 0 : _b.result) === 'success') {\r\n return 'success';\r\n }\r\n if (((_c = this.testDone) === null || _c === void 0 ? void 0 : _c.result) === 'error' || ((_d = this.testDone) === null || _d === void 0 ? void 0 : _d.result) === 'failure') {\r\n return 'failed';\r\n }\r\n return undefined;\r\n }\r\n get time() {\r\n return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0;\r\n }\r\n}\r\nasync function parseDartJson(content, options) {\r\n const testRun = getTestRun(content);\r\n const icon = testRun.success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;\r\n return {\r\n success: testRun.success,\r\n output: {\r\n title: `${options.name.trim()} ${icon}`,\r\n summary: get_report_1.default(getTestRunResult(testRun)),\r\n annotations: options.annotations ? 
getAnnotations(testRun, options.workDir, options.trackedFiles) : undefined\r\n }\r\n };\r\n}\r\nexports.parseDartJson = parseDartJson;\r\nfunction getTestRun(content) {\r\n const lines = content.split(/\\n\\r?/g).filter(line => line !== '');\r\n const events = lines.map(str => JSON.parse(str));\r\n let success = false;\r\n let totalTime = 0;\r\n const suites = {};\r\n const tests = {};\r\n for (const evt of events) {\r\n if (dart_json_types_1.isSuiteEvent(evt)) {\r\n suites[evt.suite.id] = new TestSuite(evt.suite);\r\n }\r\n else if (dart_json_types_1.isGroupEvent(evt)) {\r\n suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group);\r\n }\r\n else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) {\r\n const test = new TestCase(evt);\r\n const suite = suites[evt.test.suiteID];\r\n const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]];\r\n group.tests.push(test);\r\n tests[evt.test.id] = test;\r\n }\r\n else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden) {\r\n tests[evt.testID].testDone = evt;\r\n }\r\n else if (dart_json_types_1.isErrorEvent(evt)) {\r\n tests[evt.testID].error = evt;\r\n }\r\n else if (dart_json_types_1.isDoneEvent(evt)) {\r\n success = evt.success;\r\n totalTime = evt.time;\r\n }\r\n }\r\n return new TestRun(Object.values(suites), success, totalTime);\r\n}\r\nfunction getTestRunResult(tr) {\r\n const suites = tr.suites.map(s => {\r\n return new test_results_1.TestSuiteResult(s.suite.path, getGroups(s));\r\n });\r\n return new test_results_1.TestRunResult(suites, tr.time);\r\n}\r\nfunction getGroups(suite) {\r\n const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0);\r\n groups.sort((a, b) => { var _a, _b; return ((_a = a.group.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.group.line) !== null && _b !== void 0 ? _b : 0); });\r\n return groups.map(group => {\r\n group.tests.sort((a, b) => { var _a, _b; return ((_a = a.testStart.test.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.testStart.test.line) !== null && _b !== void 0 ? _b : 0); });\r\n const tests = group.tests.map(t => new test_results_1.TestCaseResult(t.testStart.test.name, t.result, t.time));\r\n return new test_results_1.TestGroupResult(group.group.name, tests);\r\n });\r\n}\r\nfunction getAnnotations(tr, workDir, trackedFiles) {\r\n const annotations = [];\r\n for (const suite of tr.suites) {\r\n for (const group of Object.values(suite.groups)) {\r\n for (const test of group.tests) {\r\n if (test.error) {\r\n const err = getAnnotation(test, suite, workDir, trackedFiles);\r\n if (err !== null) {\r\n annotations.push(err);\r\n }\r\n }\r\n }\r\n }\r\n }\r\n return annotations;\r\n}\r\nfunction getAnnotation(test, testSuite, workDir, trackedFiles) {\r\n var _a, _b, _c, _d, _e, _f;\r\n const stack = (_b = (_a = test.error) === null || _a === void 0 ? void 0 : _a.stackTrace) !== null && _b !== void 0 ? _b : '';\r\n let src = exceptionThrowSource(stack, trackedFiles);\r\n if (src === null) {\r\n const file = getRelativePathFromUrl((_c = test.testStart.test.url) !== null && _c !== void 0 ? _c : '', workDir);\r\n if (!trackedFiles.includes(file)) {\r\n return null;\r\n }\r\n src = {\r\n file,\r\n line: (_d = test.testStart.test.line) !== null && _d !== void 0 ? _d : 0\r\n };\r\n }\r\n return {\r\n annotation_level: 'failure',\r\n start_line: src.line,\r\n end_line: src.line,\r\n path: src.file,\r\n message: `${markdown_utils_1.fixEol((_e = test.error) === null || _e === void 0 ? 
void 0 : _e.error)}\\n\\n${markdown_utils_1.fixEol((_f = test.error) === null || _f === void 0 ? void 0 : _f.stackTrace)}`,\r\n title: `[${testSuite.suite.path}] ${test.testStart.test.name}`\r\n };\r\n}\r\nfunction exceptionThrowSource(ex, trackedFiles) {\r\n // imports from package which is tested are listed in stack traces as 'package:xyz/' which maps to relative path 'lib/'\r\n const packageRe = /^package:[a-zA-z0-9_$]+\\//;\r\n const lines = ex.split(/\\r?\\n/).map(str => str.replace(packageRe, 'lib/'));\r\n // regexp to extract file path and line number from stack trace\r\n const re = /^(.*)\\s+(\\d+):\\d+\\s+/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const file = file_utils_1.normalizeFilePath(fileStr);\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n return { file, line };\r\n }\r\n }\r\n }\r\n return null;\r\n}\r\nfunction getRelativePathFromUrl(file, workdir) {\r\n const prefix = 'file:///';\r\n if (file.startsWith(prefix)) {\r\n file = file.substr(prefix.length);\r\n }\r\n if (file.startsWith(workdir)) {\r\n file = file.substr(workdir.length);\r\n }\r\n return file;\r\n}\r\n","\"use strict\";\r\n/// reflects documentation at https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isDoneEvent = exports.isErrorEvent = exports.isTestDoneEvent = exports.isTestStartEvent = exports.isGroupEvent = exports.isSuiteEvent = void 0;\r\nfunction isSuiteEvent(event) {\r\n return event.type === 'suite';\r\n}\r\nexports.isSuiteEvent = isSuiteEvent;\r\nfunction isGroupEvent(event) {\r\n return event.type === 'group';\r\n}\r\nexports.isGroupEvent = isGroupEvent;\r\nfunction isTestStartEvent(event) {\r\n return event.type === 'testStart';\r\n}\r\nexports.isTestStartEvent = isTestStartEvent;\r\nfunction isTestDoneEvent(event) {\r\n return event.type === 'testDone';\r\n}\r\nexports.isTestDoneEvent = isTestDoneEvent;\r\nfunction isErrorEvent(event) {\r\n return event.type === 'error';\r\n}\r\nexports.isErrorEvent = isErrorEvent;\r\nfunction isDoneEvent(event) {\r\n return event.type === 'done';\r\n}\r\nexports.isDoneEvent = isDoneEvent;\r\n","\"use strict\";\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.exceptionThrowSource = exports.parseDotnetTrx = void 0;\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst xml_utils_1 = require(\"../../utils/xml-utils\");\r\nconst markdown_utils_1 = require(\"../../utils/markdown-utils\");\r\nconst test_results_1 = require(\"../../report/test-results\");\r\nconst get_report_1 = __importDefault(require(\"../../report/get-report\"));\r\nclass TestClass {\r\n constructor(name) {\r\n this.name = name;\r\n this.tests = [];\r\n }\r\n}\r\nclass Test {\r\n constructor(name, outcome, duration, error) {\r\n this.name = name;\r\n this.outcome = outcome;\r\n this.duration = duration;\r\n this.error = error;\r\n }\r\n get result() {\r\n switch (this.outcome) {\r\n case 'Passed':\r\n return 'success';\r\n case 'NotExecuted':\r\n return 'skipped';\r\n case 'Failed':\r\n return 'failed';\r\n }\r\n }\r\n}\r\nasync function parseDotnetTrx(content, options) {\r\n const trx = (await xml2js_1.parseStringPromise(content, {\r\n attrValueProcessors: [xml_utils_1.parseAttribute]\r\n }));\r\n const testClasses = getTestClasses(trx);\r\n const testRun = getTestRunResult(trx, testClasses);\r\n const success = testRun.result === 'success';\r\n const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;\r\n return {\r\n success,\r\n output: {\r\n title: `${options.name.trim()} ${icon}`,\r\n summary: get_report_1.default(testRun),\r\n annotations: options.annotations ? getAnnotations(testClasses, options.workDir, options.trackedFiles) : undefined\r\n }\r\n };\r\n}\r\nexports.parseDotnetTrx = parseDotnetTrx;\r\nfunction getTestRunResult(trx, testClasses) {\r\n const times = trx.TestRun.Times[0].$;\r\n const totalTime = times.finish.getTime() - times.start.getTime();\r\n const suites = testClasses.map(tc => {\r\n const tests = tc.tests.map(t => new test_results_1.TestCaseResult(t.name, t.result, t.duration));\r\n const group = new test_results_1.TestGroupResult(null, tests);\r\n return new test_results_1.TestSuiteResult(tc.name, [group]);\r\n });\r\n return new test_results_1.TestRunResult(suites, totalTime);\r\n}\r\nfunction getTestClasses(trx) {\r\n var _a;\r\n const unitTests = {};\r\n for (const td of trx.TestRun.TestDefinitions) {\r\n for (const ut of td.UnitTest) {\r\n unitTests[ut.$.id] = ut.TestMethod[0];\r\n }\r\n }\r\n const unitTestsResults = trx.TestRun.Results.flatMap(r => r.UnitTestResult).flatMap(unitTestResult => ({\r\n unitTestResult,\r\n testMethod: unitTests[unitTestResult.$.testId]\r\n }));\r\n const testClasses = {};\r\n for (const r of unitTestsResults) {\r\n let tc = testClasses[r.testMethod.$.className];\r\n if (tc === undefined) {\r\n tc = new TestClass(r.testMethod.$.className);\r\n testClasses[tc.name] = tc;\r\n }\r\n const output = r.unitTestResult.Output;\r\n const error = (output === null || output === void 0 ? void 0 : output.length) > 0 && ((_a = output[0].ErrorInfo) === null || _a === void 0 ? void 0 : _a.length) > 0 ? 
output[0].ErrorInfo[0] : undefined;\r\n const test = new Test(r.testMethod.$.name, r.unitTestResult.$.outcome, r.unitTestResult.$.duration, error);\r\n tc.tests.push(test);\r\n }\r\n const result = Object.values(testClasses);\r\n result.sort((a, b) => a.name.localeCompare(b.name));\r\n for (const tc of result) {\r\n tc.tests.sort((a, b) => a.name.localeCompare(b.name));\r\n }\r\n return result;\r\n}\r\nfunction getAnnotations(testClasses, workDir, trackedFiles) {\r\n const annotations = [];\r\n for (const tc of testClasses) {\r\n for (const t of tc.tests) {\r\n if (t.error) {\r\n const src = exceptionThrowSource(t.error.StackTrace[0], workDir, trackedFiles);\r\n if (src === null) {\r\n continue;\r\n }\r\n annotations.push({\r\n annotation_level: 'failure',\r\n start_line: src.line,\r\n end_line: src.line,\r\n path: src.file,\r\n message: markdown_utils_1.fixEol(t.error.Message[0]),\r\n title: `[${tc.name}] ${t.name}`\r\n });\r\n }\r\n }\r\n }\r\n return annotations;\r\n}\r\nfunction exceptionThrowSource(ex, workDir, trackedFiles) {\r\n const lines = ex.split(/\\r*\\n/);\r\n const re = / in (.+):line (\\d+)$/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const filePath = file_utils_1.normalizeFilePath(fileStr);\r\n const file = filePath.startsWith(workDir) ? filePath.substr(workDir.length) : filePath;\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n return { file, line };\r\n }\r\n }\r\n }\r\n return null;\r\n}\r\nexports.exceptionThrowSource = exceptionThrowSource;\r\n","\"use strict\";\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.exceptionThrowSource = exports.parseJestJunit = void 0;\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst markdown_utils_1 = require(\"../../utils/markdown-utils\");\r\nconst file_utils_1 = require(\"../../utils/file-utils\");\r\nconst xml_utils_1 = require(\"../../utils/xml-utils\");\r\nconst test_results_1 = require(\"../../report/test-results\");\r\nconst get_report_1 = __importDefault(require(\"../../report/get-report\"));\r\nasync function parseJestJunit(content, options) {\r\n var _a, _b;\r\n const junit = (await xml2js_1.parseStringPromise(content, {\r\n attrValueProcessors: [xml_utils_1.parseAttribute]\r\n }));\r\n const testsuites = junit.testsuites;\r\n const success = !(((_a = testsuites.$) === null || _a === void 0 ? void 0 : _a.failures) > 0 || ((_b = testsuites.$) === null || _b === void 0 ? void 0 : _b.errors) > 0);\r\n const icon = success ? markdown_utils_1.Icon.success : markdown_utils_1.Icon.fail;\r\n return {\r\n success,\r\n output: {\r\n title: `${options.name.trim()} ${icon}`,\r\n summary: getSummary(junit),\r\n annotations: options.annotations ? 
getAnnotations(junit, options.workDir, options.trackedFiles) : undefined\r\n }\r\n };\r\n}\r\nexports.parseJestJunit = parseJestJunit;\r\nfunction getSummary(junit) {\r\n const suites = junit.testsuites.testsuite.map(ts => {\r\n const name = ts.$.name.trim();\r\n const time = ts.$.time * 1000;\r\n const sr = new test_results_1.TestSuiteResult(name, getGroups(ts), time);\r\n return sr;\r\n });\r\n const time = junit.testsuites.$.time * 1000;\r\n const tr = new test_results_1.TestRunResult(suites, time);\r\n return get_report_1.default(tr);\r\n}\r\nfunction getGroups(suite) {\r\n const groups = [];\r\n for (const tc of suite.testcase) {\r\n let grp = groups.find(g => g.describe === tc.$.classname);\r\n if (grp === undefined) {\r\n grp = { describe: tc.$.classname, tests: [] };\r\n groups.push(grp);\r\n }\r\n grp.tests.push(tc);\r\n }\r\n return groups.map(grp => {\r\n const tests = grp.tests.map(tc => {\r\n const name = tc.$.name.trim();\r\n const result = getTestCaseResult(tc);\r\n const time = tc.$.time * 1000;\r\n return new test_results_1.TestCaseResult(name, result, time);\r\n });\r\n return new test_results_1.TestGroupResult(grp.describe, tests);\r\n });\r\n}\r\nfunction getTestCaseResult(test) {\r\n if (test.failure)\r\n return 'failed';\r\n if (test.skipped)\r\n return 'skipped';\r\n return 'success';\r\n}\r\nfunction getAnnotations(junit, workDir, trackedFiles) {\r\n const annotations = [];\r\n for (const suite of junit.testsuites.testsuite) {\r\n for (const tc of suite.testcase) {\r\n if (!tc.failure) {\r\n continue;\r\n }\r\n for (const ex of tc.failure) {\r\n const src = exceptionThrowSource(ex, workDir, trackedFiles);\r\n if (src === null) {\r\n continue;\r\n }\r\n annotations.push({\r\n annotation_level: 'failure',\r\n start_line: src.line,\r\n end_line: src.line,\r\n path: src.file,\r\n message: markdown_utils_1.fixEol(ex),\r\n title: `[${suite.$.name}] ${tc.$.name.trim()}`\r\n });\r\n }\r\n }\r\n }\r\n return annotations;\r\n}\r\nfunction exceptionThrowSource(ex, workDir, trackedFiles) {\r\n const lines = ex.split(/\\r?\\n/);\r\n const re = /\\((.*):(\\d+):(\\d+)\\)$/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr, colStr] = match;\r\n const filePath = file_utils_1.normalizeFilePath(fileStr);\r\n const file = filePath.startsWith(workDir) ? 
filePath.substr(workDir.length) : filePath;\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n const column = parseInt(colStr);\r\n return { file, line, column };\r\n }\r\n }\r\n }\r\n return null;\r\n}\r\nexports.exceptionThrowSource = exceptionThrowSource;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\r\nconst slugger_1 = require(\"../utils/slugger\");\r\nfunction getReport(tr) {\r\n const time = `${(tr.time / 1000).toFixed(3)}s`;\r\n const headingLine = `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.skipped}** skipped and **${tr.failed}** failed.`;\r\n const suitesSummary = tr.suites.map((s, i) => {\r\n const icon = getResultIcon(s.result);\r\n const tsTime = `${s.time}ms`;\r\n const tsName = s.name;\r\n const tsAddr = makeSuiteSlug(i, tsName).link;\r\n const tsNameLink = markdown_utils_1.link(tsName, tsAddr);\r\n return [icon, tsNameLink, s.tests, tsTime, s.passed, s.failed, s.skipped];\r\n });\r\n const summary = markdown_utils_1.table(['Result', 'Suite', 'Tests', 'Time', `Passed ${markdown_utils_1.Icon.success}`, `Failed ${markdown_utils_1.Icon.fail}`, `Skipped ${markdown_utils_1.Icon.skip}`], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suitesSummary);\r\n const suites = tr.suites.map((ts, i) => getSuiteSummary(ts, i)).join('\\n');\r\n const suitesSection = `# Test Suites\\n\\n${suites}`;\r\n return `${headingLine}\\n${summary}\\n${suitesSection}`;\r\n}\r\nexports.default = getReport;\r\nfunction getSuiteSummary(ts, index) {\r\n const icon = getResultIcon(ts.result);\r\n const content = ts.groups\r\n .map(grp => {\r\n const header = grp.name ? 
`### ${grp.name}\\n\\n` : '';\r\n const tests = markdown_utils_1.table(['Result', 'Test', 'Time'], [markdown_utils_1.Align.Center, markdown_utils_1.Align.Left, markdown_utils_1.Align.Right], ...grp.tests.map(tc => {\r\n const name = tc.name;\r\n const time = `${tc.time}ms`;\r\n const result = getResultIcon(tc.result);\r\n return [result, name, time];\r\n }));\r\n return `${header}${tests}\\n`;\r\n })\r\n .join('\\n');\r\n const tsName = ts.name;\r\n const tsSlug = makeSuiteSlug(index, tsName);\r\n const tsNameLink = `${tsName}`;\r\n return `## ${tsNameLink} ${icon}\\n\\n${content}`;\r\n}\r\nfunction makeSuiteSlug(index, name) {\r\n // use \"ts-$index-\" as prefix to avoid slug conflicts after escaping the paths\r\n return slugger_1.slug(`ts-${index}-${name}`);\r\n}\r\nfunction getResultIcon(result) {\r\n switch (result) {\r\n case 'success':\r\n return markdown_utils_1.Icon.success;\r\n case 'skipped':\r\n return markdown_utils_1.Icon.skip;\r\n case 'failed':\r\n return markdown_utils_1.Icon.fail;\r\n default:\r\n return '';\r\n }\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0;\r\nclass TestRunResult {\r\n constructor(suites, totalTime) {\r\n this.suites = suites;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.suites.reduce((sum, g) => sum + g.tests, 0);\r\n }\r\n get passed() {\r\n return this.suites.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.suites.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.suites.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.suites.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.suites.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n}\r\nexports.TestRunResult = TestRunResult;\r\nclass TestSuiteResult {\r\n constructor(name, groups, totalTime) {\r\n this.name = name;\r\n this.groups = groups;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.groups.reduce((sum, g) => sum + g.tests.length, 0);\r\n }\r\n get passed() {\r\n return this.groups.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.groups.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.groups.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.groups.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.groups.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n}\r\nexports.TestSuiteResult = TestSuiteResult;\r\nclass TestGroupResult {\r\n constructor(name, tests) {\r\n this.name = name;\r\n this.tests = tests;\r\n }\r\n get passed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'success' ? sum + 1 : sum), 0);\r\n }\r\n get failed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'failed' ? sum + 1 : sum), 0);\r\n }\r\n get skipped() {\r\n return this.tests.reduce((sum, t) => (t.result === 'skipped' ? sum + 1 : sum), 0);\r\n }\r\n get time() {\r\n return this.tests.reduce((sum, t) => sum + t.time, 0);\r\n }\r\n get result() {\r\n return this.tests.some(t => t.result === 'failed') ? 
'failed' : 'success';\r\n }\r\n}\r\nexports.TestGroupResult = TestGroupResult;\r\nclass TestCaseResult {\r\n constructor(name, result, time) {\r\n this.name = name;\r\n this.result = result;\r\n this.time = time;\r\n }\r\n}\r\nexports.TestCaseResult = TestCaseResult;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst exec_1 = require(\"@actions/exec\");\r\n// Wraps original exec() function\r\n// Returns exit code and whole stdout/stderr\r\nasync function exec(commandLine, args, options) {\r\n options = options || {};\r\n let stdout = '';\r\n let stderr = '';\r\n options.listeners = {\r\n stdout: (data) => (stdout += data.toString()),\r\n stderr: (data) => (stderr += data.toString())\r\n };\r\n const code = await exec_1.exec(commandLine, args, options);\r\n return { code, stdout, stderr };\r\n}\r\nexports.default = exec;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.normalizeFilePath = exports.normalizeDirPath = exports.getFileContent = void 0;\r\nconst fs = __importStar(require(\"fs\"));\r\nfunction getFileContent(path) {\r\n if (!fs.existsSync(path)) {\r\n throw new Error(`File '${path}' not found`);\r\n }\r\n if (!fs.lstatSync(path).isFile()) {\r\n throw new Error(`'${path}' is not a file`);\r\n }\r\n return fs.readFileSync(path, { encoding: 'utf8' });\r\n}\r\nexports.getFileContent = getFileContent;\r\nfunction normalizeDirPath(path, trailingSeparator) {\r\n if (!path) {\r\n return path;\r\n }\r\n path = normalizeFilePath(path);\r\n if (trailingSeparator && !path.endsWith('/')) {\r\n path += '/';\r\n }\r\n return path;\r\n}\r\nexports.normalizeDirPath = normalizeDirPath;\r\nfunction normalizeFilePath(path) {\r\n if (!path) {\r\n return path;\r\n }\r\n return path.trim().replace(/\\\\/g, '/');\r\n}\r\nexports.normalizeFilePath = normalizeFilePath;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.listFiles = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst exec_1 = __importDefault(require(\"./exec\"));\r\nasync function listFiles() {\r\n core.startGroup('Listing all files tracked by git');\r\n let output = '';\r\n try {\r\n output = (await exec_1.default('git', ['ls-files', '-z'])).stdout;\r\n }\r\n finally {\r\n fixStdOutNullTermination();\r\n core.endGroup();\r\n }\r\n return output.split('\\u0000').filter(s => s.length > 0);\r\n}\r\nexports.listFiles = listFiles;\r\nfunction fixStdOutNullTermination() {\r\n // Previous command uses NULL as delimiters and output is printed to stdout.\r\n // We have to make sure next thing written to stdout will start on new line.\r\n // Otherwise things like ::set-output wouldn't work.\r\n core.info('');\r\n}\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getCheckRunSha = void 0;\r\nconst github = __importStar(require(\"@actions/github\"));\r\nfunction getCheckRunSha() {\r\n if (github.context.payload.pull_request) {\r\n const pr = github.context.payload.pull_request;\r\n return pr.head.sha;\r\n }\r\n return github.context.sha;\r\n}\r\nexports.getCheckRunSha = getCheckRunSha;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.fixEol = exports.tableEscape = exports.table = exports.link = exports.details = exports.Icon = exports.Align = void 0;\r\nvar Align;\r\n(function (Align) {\r\n Align[\"Left\"] = \":---\";\r\n Align[\"Center\"] = \":---:\";\r\n Align[\"Right\"] = \"---:\";\r\n Align[\"None\"] = \"---\";\r\n})(Align = exports.Align || (exports.Align = {}));\r\nexports.Icon = {\r\n skip: '✖️',\r\n success: '✔️',\r\n fail: '❌' // ':x:'\r\n};\r\nfunction details(summary, content) {\r\n return `${summary}
${content} `;\r\n}\r\nexports.details = details;\r\nfunction link(title, address) {\r\n return `[${title}](${address})`;\r\n}\r\nexports.link = link;\r\nfunction table(headers, align, ...rows) {\r\n const headerRow = `| ${headers.map(tableEscape).join(' | ')} |`;\r\n const alignRow = `| ${align.join(' | ')} |`;\r\n const contentRows = rows.map(row => `| ${row.map(tableEscape).join(' | ')} |`).join('\\n');\r\n return [headerRow, alignRow, contentRows].join('\\n');\r\n}\r\nexports.table = table;\r\nfunction tableEscape(content) {\r\n return content.toString().replace('|', '\\\\|');\r\n}\r\nexports.tableEscape = tableEscape;\r\nfunction fixEol(text) {\r\n var _a;\r\n return (_a = text === null || text === void 0 ? void 0 : text.replace(/\\r/g, '')) !== null && _a !== void 0 ? _a : '';\r\n}\r\nexports.fixEol = fixEol;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.slug = void 0;\r\n// Returns HTML element id and href link usable as manual anchor links\r\n// This is needed because Github in check run summary doesn't automatically\r\n// create links out of headings as it normally does for other markdown content\r\nfunction slug(name) {\r\n const slugId = name\r\n .trim()\r\n .replace(/_/g, '')\r\n .replace(/[./\\\\]/g, '-')\r\n .replace(/[^\\w-]/g, '');\r\n const id = `user-content-${slugId}`;\r\n const link = `#${slugId}`;\r\n return { id, link };\r\n}\r\nexports.slug = slug;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.parseAttribute = void 0;\r\nconst isoDateRe = /^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z)$/;\r\n// matches dotnet duration: 00:00:00.0010000\r\nconst durationRe = /^(\\d\\d):(\\d\\d):(\\d\\d\\.\\d+)$/;\r\nfunction parseAttribute(str) {\r\n if (str === '' || str === undefined) {\r\n return str;\r\n }\r\n if (isoDateRe.test(str)) {\r\n return new Date(str);\r\n }\r\n const durationMatch = str.match(durationRe);\r\n if (durationMatch !== null) {\r\n const [_, hourStr, minStr, secStr] = durationMatch;\r\n return (parseInt(hourStr) * 3600 + parseInt(minStr) * 60 + parseFloat(secStr)) * 1000;\r\n }\r\n const num = parseFloat(str);\r\n if (isNaN(num)) {\r\n return str;\r\n }\r\n return num;\r\n}\r\nexports.parseAttribute = parseAttribute;\r\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if 
(this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. 
Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). 
Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. 
libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. 
All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param 
options optional exec options. See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n 
HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = 
this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // 
If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n 
if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = 
process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nasync function auth(token) {\n const tokenType = token.split(/\\./).length === 3 ? \"app\" : /^v\\d+\\./.test(token) ? 
\"installation\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.2.1\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are 
set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, [\"authStrategy\"]);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.9\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.5.7\";\n\nclass GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, {\n headers: response.headers\n });\n this.name = \"GraphqlError\";\n this.request = request; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (typeof query === \"string\" && options && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data\n });\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.6.0\";\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.paginateRest = paginateRest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listJobsForWorkflowRun: [\"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n 
checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n 
listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }]\n },\n codeScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getConductCode: [\"GET /codes_of_conduct/{key}\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getForRepo: [\"GET /repos/{owner}/{repo}/community/code_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForOrg: [\"GET 
/orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", {\n mediaType: {\n previews: [\"mockingbird\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET 
/orgs/{org}/blocks\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createCard: [\"POST /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createColumn: [\"POST /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForAuthenticatedUser: [\"POST /user/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForOrg: [\"POST /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n delete: [\"DELETE /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n get: [\"GET /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getCard: [\"GET /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getColumn: [\"GET /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCards: [\"GET /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listColumns: [\"GET /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForUser: [\"GET /users/{username}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\", {\n mediaType: {\n 
previews: [\"inertia\"]\n }\n }],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n update: [\"PATCH /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateColumn: [\"PATCH /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\", {\n mediaType: {\n previews: [\"lydian\"]\n }\n }],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: 
{\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteLegacy: [\"DELETE /reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }, {\n deprecated: \"octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy\"\n }],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET 
/repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\", {\n mediaType: {\n previews: [\"baptiste\"]\n }\n }],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n downloadArchive: [\"GET /repos/{owner}/{repo}/{archive_format}/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: 
[\"dorian\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\", {\n mediaType: {\n previews: [\"black-panther\"]\n }\n }],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET 
/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST 
/repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", {\n mediaType: {\n previews: [\"cloak\"]\n }\n }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\", 
{\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"4.2.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n/**\n * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary\n * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is\n * done, we will remove the registerEndpoints methods and return the methods\n * directly as with the other plugins. At that point we will also remove the\n * legacy workarounds and deprecations.\n *\n * See the plan at\n * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1\n */\n\nfunction restEndpointMethods(octokit) {\n return endpointsToMethods(octokit, Endpoints);\n}\nrestEndpointMethods.VERSION = VERSION;\n\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnce = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n this.headers = options.headers || {}; // redact request credentials without mutating original request options\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.4.10\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, requestOptions.request)).then(response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n return response.text().then(message => {\n const error = new requestError.RequestError(message, status, {\n headers,\n request: requestOptions\n });\n\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors; // Assumption `errors` would always be in Array format\n\n error.message = error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n } catch (e) {// ignore, see octokit/rest.js#684\n }\n\n throw error;\n });\n }\n\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) {\n throw error;\n }\n\n throw new requestError.RequestError(error.message, 500, {\n headers,\n request: requestOptions\n });\n });\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n 
}\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook\n\nfunction addHook (state, kind, name, hook) {\n var orig = hook\n if (!state.registry[name]) {\n state.registry[name] = []\n }\n\n if (kind === 'before') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options))\n }\n }\n\n if (kind === 'after') {\n hook = function (method, options) {\n var result\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_\n return orig(result, options)\n })\n .then(function () {\n return result\n })\n }\n }\n\n if (kind === 'error') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options)\n })\n }\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig\n })\n}\n","module.exports = register\n\nfunction register (state, name, method, options) {\n if (typeof method !== 'function') {\n throw new Error('method for before hook must be a function')\n }\n\n if (!options) {\n options = {}\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options)\n }, method)()\n }\n\n return Promise.resolve()\n .then(function () {\n if (!state.registry[name]) {\n return method(options)\n }\n\n return (state.registry[name]).reduce(function (method, registered) {\n return registered.hook.bind(null, method, options)\n }, method)()\n })\n}\n","module.exports = removeHook\n\nfunction removeHook (state, name, method) {\n if (!state.registry[name]) {\n return\n }\n\n var index = state.registry[name]\n .map(function (registered) { return registered.orig })\n .indexOf(method)\n\n if (index === -1) {\n return\n }\n\n state.registry[name].splice(index, 1)\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String 
message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n",";(function (sax) { // wrapper 
for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? 
Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = 
Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //