diff --git a/dist/index.js b/dist/index.js
index f64acac..44bf97d 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -2114,10 +2114,10 @@ var Align;
     Align["Center"] = ":---:";
     Align["Right"] = "---:";
     Align["None"] = "---";
-})(Align = exports.Align || (exports.Align = {}));
+})(Align || (exports.Align = Align = {}));
 exports.Icon = {
-    skip: '⚪',
-    success: '✅',
+    skip: '⚪', // ':white_circle:'
+    success: '✅', // ':white_check_mark:'
     fail: '❌' // ':x:'
 };
 function link(title, address) {
@@ -4044,8 +4044,8 @@ class Context {
         var _a, _b, _c;
         this.payload = {};
         if (process.env.GITHUB_EVENT_PATH) {
-            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
-                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+            if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {
+                this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
             }
             else {
                 const path = process.env.GITHUB_EVENT_PATH;
@@ -4063,7 +4063,8 @@ class Context {
         this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
         this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
         this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
-        this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
+        this.graphqlUrl =
+            (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
     }
     get issue() {
         const payload = this.payload;
@@ -4095,7 +4096,11 @@ exports.Context = Context;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
@@ -4108,7 +4113,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
     __setModuleDefault(result, mod);
     return result;
 };
@@ -4125,7 +4130,7 @@ exports.context = new Context.Context();
  */
 function getOctokit(token, options, ...additionalPlugins) {
     const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
-    return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
+    return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));
 }
 exports.getOctokit = getOctokit;
 //# sourceMappingURL=github.js.map
@@ -4139,7 +4144,11 @@ exports.getOctokit = getOctokit;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ?
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -4152,13 +4161,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; const httpClient = __importStar(__nccwpck_require__(6255)); +const undici_1 = __nccwpck_require__(1773); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -4174,6 +4193,19 @@ function getProxyAgent(destinationUrl) { return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } @@ -4189,7 +4221,11 @@ exports.getApiBaseUrl = getApiBaseUrl; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -4202,7 +4238,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -4219,7 +4255,8 @@ const baseUrl = Utils.getApiBaseUrl(); exports.defaults = { baseUrl, request: { - agent: Utils.getProxyAgent(baseUrl) + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } }; exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); @@ -4339,7 +4376,11 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand /* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -4352,7 +4393,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -4371,6 +4412,7 @@ const http = __importStar(__nccwpck_require__(3685)); const https = __importStar(__nccwpck_require__(5687)); const pm = __importStar(__nccwpck_require__(9835)); const tunnel = __importStar(__nccwpck_require__(4294)); +const undici_1 = __nccwpck_require__(1773); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -4400,16 +4442,16 @@ var HttpCodes; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); +})(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com @@ -4778,6 +4820,15 @@ class HttpClient { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; @@ -4877,6 +4928,30 @@ class HttpClient { } return agent; } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); @@ -6461,16 +6536,39 @@ exports["default"] = Settings; /***/ }), /***/ 334: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; async function auth(token) { const isApp = token.split(/\./).length === 3; const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); @@ -6478,109 +6576,144 @@ async function auth(token) { const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; return { type: "token", - token: token, + token, tokenType }; } -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ +// pkg/dist-src/with-authorization-prefix.js function withAuthorizationPrefix(token) { if (token.split(/\./).length === 3) { return `bearer ${token}`; } - return `token ${token}`; } +// pkg/dist-src/hook.js async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); + const endpoint = request.endpoint.merge( + route, + parameters + ); endpoint.headers.authorization = withAuthorizationPrefix(token); return request(endpoint); } -const createTokenAuth = function createTokenAuth(token) { +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { if (!token) { throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); } - token = token.replace(/^(token|bearer) +/i, ""); return Object.assign(auth.bind(null, token), { hook: hook.bind(null, token) }); }; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } +// 
pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(5030); +var import_before_after_hook = __nccwpck_require__(3682); +var import_request = __nccwpck_require__(6234); +var import_graphql = __nccwpck_require__(8467); +var import_auth_token = __nccwpck_require__(334); + +// pkg/dist-src/version.js +var VERSION = "5.0.2"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); + const hook = new import_before_after_hook.Collection(); const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, headers: {}, request: Object.assign({}, options.request, { // @ts-ignore internal usage only, no need to type @@ -6590,321 +6723,283 @@ class Octokit { previews: [], format: "" } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; if (options.baseUrl) { requestDefaults.baseUrl = options.baseUrl; } - if (options.previews) { requestDefaults.mediaType.previews = options.previews; } - if (options.timeZone) { requestDefaults.headers["time-zone"] = options.timeZone; } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. 
Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; if (!options.authStrategy) { if (!options.auth) { - // (1) this.auth = async () => ({ type: "unauthenticated" }); } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - + const auth = (0, import_auth_token.createTokenAuth)(options.auth); hook.wrap("request", auth.hook); this.auth = auth; } } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); hook.wrap("request", auth.hook); this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - + } const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); +// pkg/dist-src/version.js +var VERSION = "9.0.4"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; +// pkg/dist-src/util/lowercase-keys.js function lowercaseKeys(object) { if (!object) { return {}; } - return Object.keys(object).reduce((newObj, key) => { newObj[key.toLowerCase()] = object[key]; return newObj; }, {}); } +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js function mergeDeep(defaults, options) { const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if 
(!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); } else { - Object.assign(result, { - [key]: options[key] - }); + Object.assign(result, { [key]: options[key] }); } }); return result; } +// pkg/dist-src/util/remove-undefined-properties.js function removeUndefinedProperties(obj) { for (const key in obj) { - if (obj[key] === undefined) { + if (obj[key] === void 0) { delete obj[key]; } } - return obj; } +// pkg/dist-src/merge.js function merge(defaults, route, options) { if (typeof route === "string") { let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); + options = Object.assign(url ? { method, url } : { url: method }, options); } else { options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - + } + options.headers = lowercaseKeys(options.headers); removeUndefinedProperties(options); removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); return mergedOptions; } +// pkg/dist-src/util/add-query-parameters.js function addQueryParameters(url, parameters) { const separator = /\?/.test(url) ? 
"&" : "?"; const names = Object.keys(parameters); - if (names.length === 0) { return url; } - - return url + separator + names.map(name => { + return url + separator + names.map((name) => { if (name === "q") { return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } - return `${name}=${encodeURIComponent(parameters[name])}`; }).join("&"); } -const urlVariableRegex = /\{[^}]+\}/g; - +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; function removeNonChars(variableName) { return variableName.replace(/^\W+|\W+$/g, "").split(/,/); } - function extractUrlVariableNames(url) { const matches = url.match(urlVariableRegex); - if (!matches) { return []; } - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } +// pkg/dist-src/util/omit.js function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; } -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ +// pkg/dist-src/util/url-template.js function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { if (!/%[0-9A-Fa-f]/.test(part)) { part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - return part; }).join(""); } - function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { return "%" + c.charCodeAt(0).toString(16).toUpperCase(); }); } - function encodeValue(operator, value, key) { value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - if (key) { return encodeUnreserved(key) + "=" + value; } else { return value; } } - function isDefined(value) { - return value !== undefined && value !== null; + return value !== void 0 && value !== null; } - function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } - function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - + var value = context[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); - if (modifier && modifier !== "*") { value = value.substring(0, parseInt(modifier, 10)); } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); } else { if (modifier === "*") { if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? key : "") + ); }); } else { - Object.keys(value).forEach(function (k) { + Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { result.push(encodeValue(operator, value[k], k)); } @@ -6912,20 +7007,18 @@ function getValues(context, operator, key, modifier) { } } else { const tmp = []; - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); }); } else { - Object.keys(value).forEach(function (k) { + Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { tmp.push(encodeUnreserved(k)); tmp.push(encodeValue(operator, value[k].toString())); } }); } - if (isKeyOperator(operator)) { result.push(encodeUnreserved(key) + "=" + tmp.join(",")); } else if (tmp.length !== 0) { @@ -6944,89 +7037,93 @@ function getValues(context, operator, key, modifier) { result.push(""); } } - return result; } - function parseUrl(template) { return { expand: expand.bind(null, template) }; } - function expand(template, context) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator 
!== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } - - return (values.length !== 0 ? operator : "") + values.join(separator); } else { - return values.join(","); + return encodeReserved(literal); } - } else { - return encodeReserved(literal); } - }); + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } } +// pkg/dist-src/parse.js function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - + let method = options.method.toUpperCase(); let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); let headers = Object.assign({}, options.headers); let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); const urlVariableNames = extractUrlVariableNames(url); url = parseUrl(url).expand(parameters); - if (!/^http/.test(url)) { url = options.baseUrl + url; } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); const remainingParameters = omit(parameters, omittedParameters); const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - if (!isBinaryRequest) { if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - + } if (["GET", "HEAD"].includes(method)) { url = addQueryParameters(url, remainingParameters); } else { @@ -7035,183 +7132,189 @@ function parse(options) { } else { if (Object.keys(remainingParameters).length) { body = remainingParameters; - } else { - headers["content-length"] = 0; } } - } // default content-type for JSON if body is set - - + } if (!headers["content-type"] && typeof body !== "undefined") { headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - + } if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? { - request: options.request - } : null); + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); } +// pkg/dist-src/endpoint-with-defaults.js function endpointWithDefaults(defaults, route, options) { return parse(merge(defaults, route, options)); } +// pkg/dist-src/with-defaults.js function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), parse }); } -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. 
- -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(6234); +var import_universal_user_agent = __nccwpck_require__(5030); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/version.js +var VERSION = "7.0.2"; -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(6234); -const VERSION = "4.8.0"; +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(6234); +// pkg/dist-src/error.js function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); } - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { super(_buildMessageForResponseErrors(response)); - this.request = request; + this.request = request2; this.headers = headers; this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. 
- + this.name = "GraphqlResponseError"; this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - + this.data = response.data; if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } } +}; -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { if (options) { if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); } - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); } } - - const parsedOptions = typeof query === "string" ? Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { if (NON_VARIABLE_OPTIONS.includes(key)) { result[key] = parsedOptions[key]; return result; } - if (!result.variables) { result.variables = {}; } - result.variables[key] = parsedOptions[key]; return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } - - return request(requestOptions).then(response => { + return request2(requestOptions).then((response) => { if (response.data.errors) { const headers = {}; - for (const key of Object.keys(response.headers)) { headers[key] = response.headers[key]; } - - throw new GraphqlResponseError(requestOptions, headers, response.data); + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); } - return response.data.data; }); } -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; - return Object.assign(newApi, { defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint + endpoint: newRequest.endpoint }); } -const graphql$1 = withDefaults(request.request, { +// pkg/dist-src/index.js +var graphql2 = 
withDefaults(import_request3.request, { headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` }, method: "POST", url: "/graphql" @@ -7222,94 +7325,59 @@ function withCustomRequest(customRequest) { url: "/graphql" }); } - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 4193: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} +// pkg/dist-src/version.js +var VERSION = "9.1.5"; -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. 
- * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ +// pkg/dist-src/normalize-paginated-list-response.js function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { + return { + ...response, data: [] - }); + }; } - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - + if (!responseNeedsNormalization) + return response; const incompleteResults = response.data.incomplete_results; const repositorySelection = response.data.repository_selection; const totalCount = response.data.total_count; @@ -7319,19 +7387,17 @@ function normalizePaginatedListResponse(response) { const namespaceKey = Object.keys(response.data)[0]; const data = response.data[namespaceKey]; response.data = data; - if (typeof incompleteResults !== "undefined") { response.data.incomplete_results = incompleteResults; } - if (typeof repositorySelection !== "undefined") { response.data.repository_selection = repositorySelection; } - response.data.total_count = totalCount; return response; } +// pkg/dist-src/iterator.js function iterator(octokit, route, parameters) { const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); const requestMethod = typeof route === "function" ? 
route : octokit.request; @@ -7341,26 +7407,18 @@ function iterator(octokit, route, parameters) { return { [Symbol.asyncIterator]: () => ({ async next() { - if (!url) return { - done: true - }; - + if (!url) + return { done: true }; try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; } catch (error) { - if (error.status !== 409) throw error; + if (error.status !== 409) + throw error; url = ""; return { value: { @@ -7371,48 +7429,284 @@ function iterator(octokit, route, parameters) { }; } } - }) }; } +// pkg/dist-src/paginate.js function paginate(octokit, route, parameters, mapFn) { if (typeof parameters === "function") { mapFn = parameters; - parameters = undefined; + parameters = void 0; } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); } - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { if (result.done) { return results; } - let earlyExit = false; - function done() { earlyExit = true; } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); if (earlyExit) { return results; } - - return gather(octokit, results, iterator, mapFn); + return gather(octokit, results, iterator2, mapFn); }); } -const composePaginateRest = Object.assign(paginate, { +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { iterator }); -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET 
/projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET 
/repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET 
/orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; +// pkg/dist-src/paginating-endpoints.js function isPaginatingEndpoint(arg) { if (typeof arg === "string") { return paginatingEndpoints.includes(arg); @@ -7421,11 +7715,7 @@ function isPaginatingEndpoint(arg) { } } -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - +// pkg/dist-src/index.js function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { @@ -7434,202 +7724,384 @@ function paginateRest(octokit) { }; } paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 3044: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: 
all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} +// pkg/dist-src/version.js +var VERSION = "10.2.0"; -const Endpoints = { +// pkg/dist-src/generated/endpoints.js +var Endpoints = { actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET 
/repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], + getEnvironmentPublicKey: [ + "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: ["GET 
/orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - removeSelectedRepoFromOrgSecret: 
["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + 
setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], getFeeds: ["GET /feeds"], getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], listEventsForAuthenticatedUser: ["GET /users/{username}/events"], listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], listPublicEvents: ["GET /events"], listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], listPublicEventsForUser: ["GET /users/{username}/events/public"], listPublicOrgEvents: ["GET /orgs/{org}/events"], listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], listReposStarredByAuthenticatedUser: ["GET /user/starred"], listReposStarredByUser: ["GET /users/{username}/starred"], listReposWatchedByUser: ["GET /users/{username}/subscriptions"], @@ -7640,18 +8112,26 @@ const Endpoints = { markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT 
/user/installations/{installation_id}/repositories/{repository_id}" + ], checkToken: ["POST /applications/{client_id}/token"], createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteInstallation: ["DELETE /app/installations/{installation_id}"], deleteToken: ["DELETE /applications/{client_id}/token"], @@ -7660,75 +8140,132 @@ const Endpoints = { getInstallation: ["GET /app/installations/{installation_id}"], getOrgInstallation: ["GET /orgs/{org}/installation"], getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], getUserInstallation: ["GET /users/{username}/installation"], getWebhookConfigForApp: ["GET /app/hook/config"], getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], listInstallations: ["GET /app/installations"], listInstallationsForAuthenticatedUser: ["GET /user/installations"], listPlans: ["GET /marketplace_listing/plans"], listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], resetToken: ["PATCH /applications/{client_id}/token"], revokeInstallationAccessToken: ["DELETE /installation/token"], scopeToken: ["POST /applications/{client_id}/token/scoped"], 
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] }, checks: { create: ["POST /repos/{owner}/{repo}/check-runs"], createSuite: ["POST /repos/{owner}/{repo}/check-suites"], get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: 
["GET /repos/{owner}/{repo}/code-scanning/default-setup"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { @@ -7736,82 +8273,190 @@ const Endpoints = { getConductCode: ["GET /codes_of_conduct/{key}"] }, codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], + deleteFromOrganization: [ + "DELETE 
/orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + 
setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] }, dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] }, + emojis: { get: ["GET /emojis"] }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], @@ -7857,33 +8502,52 @@ const Endpoints = { getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] - }], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + 
], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] }, issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], createLabel: ["POST /repos/{owner}/{repo}/labels"], createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], @@ -7895,24 +8559,38 @@ const Endpoints = { listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], listMilestones: ["GET /repos/{owner}/{repo}/milestones"], lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] }, licenses: { get: ["GET /licenses/{license}"], @@ -7921,65 +8599,151 @@ const Endpoints = { }, markdown: { render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] }, meta: { get: ["GET /meta"], + getAllVersions: ["GET /versions"], getOctocat: ["GET /octocat"], getZen: ["GET /zen"], root: ["GET /"] }, migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], listForAuthenticatedUser: ["GET /user/migrations"], listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET 
/user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] }, orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], blockUser: ["PUT /orgs/{org}/blocks/{username}"], cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], getWebhookConfigForOrg: ["GET 
/orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], @@ -7987,55 +8751,148 @@ const Endpoints = { listMembers: ["GET /orgs/{org}/members"], listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], removeMember: ["DELETE /orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] }, packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], 
- deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET 
/user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], listPackagesForAuthenticatedUser: ["GET /user/packages"], listPackagesForOrganization: ["GET /orgs/{org}/packages"], listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] }, projects: { addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], @@ -8050,7 +8907,9 @@ const Endpoints = { get: ["GET /projects/{project_id}"], getCard: ["GET /projects/columns/cards/{card_id}"], getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], listCards: ["GET /projects/columns/{column_id}/cards"], listCollaborators: ["GET /projects/{project_id}/collaborators"], listColumns: ["GET /projects/{project_id}/columns"], @@ -8059,7 +8918,9 @@ const Endpoints = { listForUser: ["GET /users/{username}/projects"], moveCard: ["POST /projects/columns/cards/{card_id}/moves"], moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], + removeCollaborator: [ + "DELETE 
/projects/{project_id}/collaborators/{username}" + ], update: ["PATCH /projects/{project_id}"], updateCard: ["PATCH /projects/columns/cards/{card_id}"], updateColumn: ["PATCH /projects/columns/{column_id}"] @@ -8067,191 +8928,411 @@ const Endpoints = { pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - 
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] }, + rateLimit: { get: ["GET /rate_limit"] }, reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + 
deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] }, repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], codeownersErrors: ["GET 
/repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: 
[ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - 
generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - 
getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET 
/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], listCommits: ["GET /repos/{owner}/{repo}/commits"], listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], listDeployments: ["GET /repos/{owner}/{repo}/deployments"], listForAuthenticatedUser: ["GET /user/repos"], listForOrg: ["GET /orgs/{org}/repos"], @@ -8262,67 +9343,117 @@ const Endpoints = { listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], listReleases: ["GET /repos/{owner}/{repo}/releases"], listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + redeliverWebhookDelivery: [ + "POST 
/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], transfer: ["POST /repos/{owner}/{repo}/transfer"], update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH 
/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] }, search: { code: ["GET /search/code"], @@ -8334,390 +9465,596 @@ const Endpoints = { users: ["GET /search/users"] }, secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] }, teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT 
/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], list: ["GET /orgs/{org}/teams"], listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], follow: ["PUT /user/following/{username}"], getAuthenticated: ["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], + getGpgKeyForAuthenticated: [ + "GET 
/user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + 
"PATCH /user/email/visibility" + ], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; +var endpoints_default = Endpoints; -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { method, url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } - return newMethods; } - function decorate(octokit, scope, methodName, defaults, decorations) { const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - + let options = requestWithDefaults.endpoint.merge(...args); if (decorations.mapToData) { options = Object.assign({}, options, { data: options[decorations.mapToData], - 
[decorations.mapToData]: undefined + [decorations.mapToData]: void 0 }); return requestWithDefaults(options); } - if (decorations.renamed) { const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); } - if (decorations.deprecated) { octokit.log.warn(decorations.deprecated); } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; } - - delete options[name]; + delete options2[name]; } } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - + return requestWithDefaults(options2); + } return requestWithDefaults(...args); } - return Object.assign(withDecorations, requestWithDefaults); } +// pkg/dist-src/index.js function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); + const api = endpointsToMethods(octokit); return { rest: api }; } restEndpointMethods.VERSION = VERSION; function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { + const api = endpointsToMethods(octokit); + return { + ...api, rest: api - }); + }; } legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(8932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - + super(message); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } - this.name = "HttpError"; this.status = statusCode; let headers; - if ("headers" in options && typeof options.headers !== "undefined") { headers = options.headers; } - if ("response" in options) { this.response = options.response; headers = options.response.headers; - } // redact request credentials without mutating original request options - - + } const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) }); } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL 
query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; Object.defineProperty(this, "code", { get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); return statusCode; } - }); Object.defineProperty(this, "headers", { get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." + ) + ); return headers || {}; } - }); } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 6234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(9440); +var import_universal_user_agent = __nccwpck_require__(5030); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +// pkg/dist-src/version.js +var VERSION = "8.1.6"; -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} -const VERSION = "5.6.3"; +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(537); +// pkg/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } +// pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { + var _a, _b, _c; const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } - let headers = {}; let status; let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { url = response.url; status = response.status; - for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } - if ("deprecation" in headers) { const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}` + ); } - if (status === 204 || status === 205) { return; - } // GitHub API returns 200 for HEAD requests - - + } if (requestOptions.method === "HEAD") { if (status < 400) { return; } - - throw new requestError.RequestError(response.statusText, status, { + throw new import_request_error.RequestError(response.statusText, status, { response: { url, status, headers, - data: undefined + data: void 0 }, request: requestOptions }); } - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { + throw new import_request_error.RequestError("Not modified", status, { response: { url, status, @@ -8727,10 +10064,9 @@ function fetchWrapper(requestOptions) { request: requestOptions }); } - if (status >= 400) { const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { + const error = new import_request_error.RequestError(toErrorMessage(data), status, { response: { url, status, @@ -8741,87 +10077,87 @@ function fetchWrapper(requestOptions) { }); throw error; } - - return getResponseData(response); - }).then(data => { + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { return { status, url, headers, data }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { request: requestOptions }); }); } - async function getResponseData(response) { const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json(); + return response.json().catch(() => response.text()).catch(() => ""); } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { return response.text(); } - return getBufferResponse(response); } - function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - + if (typeof data === "string") + return data; if ("message" in data) { if (Array.isArray(data.errors)) { return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; } - return data.message; - } // istanbul ignore next - just in case - - + } return `Unknown error: ${JSON.stringify(data)}`; } +// pkg/dist-src/with-defaults.js function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); + return fetchWrapper(endpoint2.parse(endpointOptions)); } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, 
parameters2)) + ); }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); - return endpointOptions.request.hook(request, endpointOptions); + return endpointOptions.request.hook(request2, endpointOptions); }; - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); } -const request = withDefaults(endpoint.endpoint, { +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } }); - -exports.request = request; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), @@ -10544,12 +11880,12 @@ module.exports = function () { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { exports.EntryHeader = __nccwpck_require__(9032); -exports.MainHeader = __nccwpck_require__(4408); +exports.MainHeader = __nccwpck_require__(8952); /***/ }), -/***/ 4408: +/***/ 8952: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Utils = __nccwpck_require__(5182), @@ -13353,7 +14689,7 @@ exports.flatten = (...args) => { /***/ }), -/***/ 2286: +/***/ 8367: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -13795,6 +15131,134 @@ module.exports = CacheableLookup; module.exports["default"] = CacheableLookup; +/***/ }), + +/***/ 4340: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {PassThrough: PassThroughStream} = __nccwpck_require__(2781); + +module.exports = options => { + options = {...options}; + + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } + + if (isBuffer) { + encoding = null; + } + + const stream = new PassThroughStream({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + let length = 0; + const chunks = []; + + stream.on('data', chunk => { + chunks.push(chunk); + + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return chunks; + } + + return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); + }; + + stream.getBufferedLength = () => length; + + return stream; +}; + + +/***/ }), + +/***/ 7040: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {constants: BufferConstants} = __nccwpck_require__(4300); +const pump = __nccwpck_require__(8341); +const bufferStream = __nccwpck_require__(4340); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +async function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + options = { + maxBuffer: Infinity, + ...options + }; + + const {maxBuffer} = options; + + let stream; + await new Promise((resolve, reject) => { + const rejectPromise = error => { + // Don't retrieve an oversized buffer. 
+ if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { + error.bufferedData = stream.getBufferedValue(); + } + + reject(error); + }; + + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); + + return stream.getBufferedValue(); +} + +module.exports = getStream; +// TODO: Remove this for the next major release +module.exports["default"] = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; + + /***/ }), /***/ 8116: @@ -13806,7 +15270,7 @@ module.exports["default"] = CacheableLookup; const EventEmitter = __nccwpck_require__(2361); const urlLib = __nccwpck_require__(7310); const normalizeUrl = __nccwpck_require__(7952); -const getStream = __nccwpck_require__(1766); +const getStream = __nccwpck_require__(7040); const CachePolicy = __nccwpck_require__(1002); const Response = __nccwpck_require__(9004); const lowercaseKeys = __nccwpck_require__(9662); @@ -14476,7 +15940,7 @@ const async_1 = __nccwpck_require__(5679); const stream_1 = __nccwpck_require__(4630); const sync_1 = __nccwpck_require__(2405); const settings_1 = __nccwpck_require__(952); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); async function FastGlob(source, options) { assertPatternsInput(source); const works = getWorks(source, async_1.default, options); @@ -14583,7 +16047,7 @@ module.exports = FastGlob; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); function generate(input, settings) { const patterns = processPatterns(input, settings); const ignore = processPatterns(settings.ignore, settings); @@ -14731,7 +16195,7 @@ exports["default"] = ProviderAsync; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); const partial_1 = __nccwpck_require__(5295); class DeepFilter { constructor(_settings, _micromatchOptions) { @@ -14801,7 +16265,7 @@ exports["default"] = DeepFilter; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); class EntryFilter { constructor(_settings, _micromatchOptions) { this._settings = _settings; @@ -14872,7 +16336,7 @@ exports["default"] = EntryFilter; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); class ErrorFilter { constructor(_settings) { this._settings = _settings; @@ -14895,7 +16359,7 @@ exports["default"] = ErrorFilter; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); class Matcher { constructor(_patterns, _settings, _micromatchOptions) { this._patterns = _patterns; @@ 
-15120,7 +16584,7 @@ exports["default"] = ProviderSync; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); class EntryTransformer { constructor(_settings) { this._settings = _settings; @@ -15199,7 +16663,7 @@ exports["default"] = ReaderAsync; Object.defineProperty(exports, "__esModule", ({ value: true })); const path = __nccwpck_require__(1017); const fsStat = __nccwpck_require__(109); -const utils = __nccwpck_require__(5444); +const utils = __nccwpck_require__(8223); class Reader { constructor(_settings) { this._settings = _settings; @@ -15485,7 +16949,7 @@ exports.createDirentFromStats = createDirentFromStats; /***/ }), -/***/ 5444: +/***/ 8223: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -15523,11 +16987,11 @@ const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ /** * All non-escaped special characters. - * Posix: ()*?[\]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. - * Windows: (){}, !+@ before (, ! at the beginning. + * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. + * Windows: (){}[], !+@ before (, ! at the beginning. */ const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; -const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([(){}]|^!|[!+@](?=\())/g; +const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; /** * The device path (\\.\ or \\?\). * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths @@ -15538,7 +17002,7 @@ const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; * Windows: !()+@{} * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions */ -const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@{}])/g; +const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; /** * Designed to work only with simple paths: `dir\\file`. */ @@ -15726,7 +17190,7 @@ function expandPatternsWithBraceExpansion(patterns) { } exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; function expandBraceExpansion(pattern) { - const patterns = micromatch.braces(pattern, { expand: true, nodupes: true }); + const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); /** * Sort the patterns by length so that the same depth patterns are processed side by side. 
* `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` @@ -16378,134 +17842,6 @@ const fill = (start, end, step, options = {}) => { module.exports = fill; -/***/ }), - -/***/ 1585: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const {PassThrough: PassThroughStream} = __nccwpck_require__(2781); - -module.exports = options => { - options = {...options}; - - const {array} = options; - let {encoding} = options; - const isBuffer = encoding === 'buffer'; - let objectMode = false; - - if (array) { - objectMode = !(encoding || isBuffer); - } else { - encoding = encoding || 'utf8'; - } - - if (isBuffer) { - encoding = null; - } - - const stream = new PassThroughStream({objectMode}); - - if (encoding) { - stream.setEncoding(encoding); - } - - let length = 0; - const chunks = []; - - stream.on('data', chunk => { - chunks.push(chunk); - - if (objectMode) { - length = chunks.length; - } else { - length += chunk.length; - } - }); - - stream.getBufferedValue = () => { - if (array) { - return chunks; - } - - return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); - }; - - stream.getBufferedLength = () => length; - - return stream; -}; - - -/***/ }), - -/***/ 1766: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const {constants: BufferConstants} = __nccwpck_require__(4300); -const pump = __nccwpck_require__(8341); -const bufferStream = __nccwpck_require__(1585); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -async function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } - - options = { - maxBuffer: Infinity, - ...options - }; - - const {maxBuffer} = options; - - let stream; - await new Promise((resolve, reject) => { - const rejectPromise = error => { - // Don't retrieve an oversized buffer. - if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { - error.bufferedData = stream.getBufferedValue(); - } - - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }); - - return stream.getBufferedValue(); -} - -module.exports = getStream; -// TODO: Remove this for the next major release -module.exports["default"] = getStream; -module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); -module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); -module.exports.MaxBufferError = MaxBufferError; - - /***/ }), /***/ 6457: @@ -16951,7 +18287,7 @@ const http = __nccwpck_require__(3685); const http_1 = __nccwpck_require__(3685); const https = __nccwpck_require__(5687); const http_timer_1 = __nccwpck_require__(8097); -const cacheable_lookup_1 = __nccwpck_require__(2286); +const cacheable_lookup_1 = __nccwpck_require__(8367); const CacheableRequest = __nccwpck_require__(8116); const decompressResponse = __nccwpck_require__(2391); // @ts-expect-error Missing types @@ -21756,52 +23092,6 @@ module.exports = function(num) { }; -/***/ }), - -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. 
- * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - /***/ }), /***/ 2820: @@ -22319,8 +23609,8 @@ function pauseStreams (streams, options) { const util = __nccwpck_require__(3837); const braces = __nccwpck_require__(610); -const picomatch = __nccwpck_require__(8569); -const utils = __nccwpck_require__(479); +const picomatch = __nccwpck_require__(2864); +const utils = __nccwpck_require__(7426); const isEmptyString = val => val === '' || val === './'; /** @@ -22786,9758 +24076,29886 @@ module.exports = micromatch; /***/ }), -/***/ 2610: -/***/ ((module) => { +/***/ 2864: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// We define these manually to ensure they're always copied -// even if they would move up the prototype chain -// https://nodejs.org/api/http.html#http_class_http_incomingmessage -const knownProps = [ - 'destroy', - 'setTimeout', - 'socket', - 'headers', - 'trailers', - 'rawHeaders', - 'statusCode', - 'httpVersion', - 'httpVersionMinor', - 'httpVersionMajor', - 'rawTrailers', - 'statusMessage' -]; - -module.exports = (fromStream, toStream) => { - const fromProps = new Set(Object.keys(fromStream).concat(knownProps)); - - for (const prop of fromProps) { - // Don't overwrite existing properties - if (prop in toStream) { - continue; - } - - toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop]; - } -}; +module.exports = __nccwpck_require__(555); /***/ }), -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 6476: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +const path = __nccwpck_require__(1017); +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +/** + * Posix glob regex + */ -var Stream = _interopDefault(__nccwpck_require__(2781)); -var http = _interopDefault(__nccwpck_require__(3685)); -var Url = _interopDefault(__nccwpck_require__(7310)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(5687)); -var zlib = _interopDefault(__nccwpck_require__(9796)); +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR +}; -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; +/** + * Windows glob regex + */ -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); +const WINDOWS_CHARS = { + ...POSIX_CHARS, -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)` +}; - this[BUFFER] = Buffer.concat(buffers); +/** + * POSIX Bracket Regex + */ - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); + // Replace globs with equivalent patterns to reduce parsing time. 
+ REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); + // Alphabet chars. + CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ - this.message = message; - this.type = type; + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } + CHAR_ASTERISK: 42, /* * */ - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; + SEP: path.sep, -let convert; -try { - convert = (__nccwpck_require__(2877).convert); -} catch (e) {} + /** + * Create EXTGLOB_CHARS + */ -const INTERNALS = Symbol('Body internals'); + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; + /** + * Create GLOB_CHARS + */ -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, +/***/ }), - get bodyUsed() { - return this[INTERNALS].disturbed; - }, +/***/ 5961: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, +"use strict"; - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; +const constants = __nccwpck_require__(6476); +const utils = __nccwpck_require__(7426); - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, +/** + * Constants + */ - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, +/** + * Helpers + */ - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } -// In browsers, all properties are enumerable. 
-Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); + args.sort(); + const value = `[${args.join('-')}]`; -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; }; /** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise + * Create the message for a syntax error */ -function consumeBody() { - var _this4 = this; - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; - this[INTERNALS].disturbed = true; +/** + * Parse the given input string. + * @param {String} input + * @param {Object} options + * @return {Object} + */ - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } - let body = this.body; + input = REPLACEMENTS[input] || input; - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - // body is blob - if (isBlob(body)) { - body = body.stream(); - } + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + const capture = opts.capture ? 
'' : '?:'; + const win32 = utils.isWindows(options); - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(win32); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? 
globstar(opts) : STAR; - accumBytes += chunk.length; - accum.push(chunk); - }); + if (opts.capture) { + star = `(${star})`; + } - body.on('end', function () { - if (abort) { - return; - } + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } - clearTimeout(resTimeout); + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} + input = utils.removePrefix(input, state); + len = input.length; -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; + /** + * Tokenizing helpers + */ - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; - // html5 - if (!res && str) { - res = / { + let count = 1; - // html4 - if (!res && str) { - res = / { + state[type]++; + stack.push(type); + }; - // prevent decode issues when sites use incorrect encoding - // ref: https://hsivonen.fi/encoding-menu/ - if (charset === 'gb2312' || charset === 'gbk') { - charset = 'gb18030'; - } - } + const decrement = type => { + state[type]--; + stack.pop(); + }; - // turn raw buffers into a single utf-8 buffer - return convert(buffer, 'UTF-8', charset).toString(); -} + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. + */ -/** - * Detect a URLSearchParams object - * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 - * - * @param Object obj Object to detect by type or brand - * @return String - */ -function isURLSearchParams(obj) { - // Duck-typing as a necessary condition. 
- if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { - return false; - } + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); - // Brand-checking and more duck-typing as optional condition. - return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; -} + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } -/** - * Check if `obj` is a W3C `Blob` object (which `File` inherits from) - * @param {*} obj - * @return {boolean} - */ -function isBlob(obj) { - return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); -} + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } -/** - * Clone body given Res/Req instance - * - * @param Mixed instance Response or Request instance - * @return Mixed - */ -function clone(instance) { - let p1, p2; - let body = instance.body; + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } - // don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; - // check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if (body instanceof Stream && typeof body.getBoundary !== 'function') { - // tee instance body - p1 = new PassThrough(); - p2 = new PassThrough(); - body.pipe(p1); - body.pipe(p2); - // set instance body to teed body and return the other teed body - instance[INTERNALS].body = p1; - body = p2; - } + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; - return body; -} + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is present. 
- * - * @param Mixed instance Any options.body input - */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } -} + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; -/** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. - * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes - * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible - */ -function getTotalBytes(instance) { - const body = instance.body; - - - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } -} + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. 
- * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; - - - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } -} + if (token.type === 'negate') { + let extglobStar = star; -// expose Promise -Body.Promise = global.Promise; + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } -/** - * headers.js - * - * Headers class offers convenient helpers - */ + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} + output = token.close = `)${expression})${extglobStar})`; + } -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. - * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } - } - return undefined; -} + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; + /** + * Fast paths + */ - this[MAP] = Object.create(null); + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? 
QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } - return; - } + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : ''); + }); + } + } - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } + if (output === input && opts.contains === true) { + state.output = input; + return state; + } - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } + state.output = utils.wrapOutput(output, state, options); + return state; + } - return this[MAP][key].join(', '); - } + /** + * Tokenize input until we reach end-of-string + */ - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } + while (!eos()) { + value = advance(); - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } + if (value === '\u0000') { + continue; + } - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } + /** + * Escaped characters + */ - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } + if (value === '\\') { + const next = peek(); - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } + if (next === '/' && opts.bash !== true) { + continue; + } - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } + if (next === '.' || next === ';') { + continue; + } - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; -const INTERNAL = Symbol('internal'); + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } - this[INTERNAL].index = index + 1; + prev.value += value; + append({ value }); + continue; + } - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } + /** + * Double quotes + */ - return obj; -} + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. 
- * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} + /** + * Parentheses + */ -const INTERNALS$1 = Symbol('Response internals'); + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } - Body.call(this, body, opts); + push({ type: 'paren', value, output: state.parens ? ')' : '\\)' }); + decrement('parens'); + continue; + } - const status = opts.status || 200; - const headers = new Headers(opts.headers); + /** + * Square brackets + */ - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } + value = `\\${value}`; + } else { + increment('brackets'); + } - get url() { - return this[INTERNALS$1].url || ''; - } + push({ type: 'bracket', value }); + continue; + } - get status() { - return this[INTERNALS$1].status; - } + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } - get redirected() { - return this[INTERNALS$1].counter > 0; - } + push({ type: 'text', value, output: `\\${value}` }); + continue; + } - get statusText() { - return this[INTERNALS$1].statusText; - } + decrement('brackets'); - get headers() { - return this[INTERNALS$1].headers; - } + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } - /** - * Clone this response - * - * @return Response - */ - clone() { - return new 
Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} + prev.value += value; + append({ value }); -Body.mixIn(Response.prototype); + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + /** + * Braces + */ -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } + if (value === '{' && opts.nobrace !== true) { + increment('braces'); - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } + braces.push(open); + push(open); + continue; + } - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + if (value === '}') { + const brace = braces[braces.length - 1]; - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } - const headers = new Headers(init.headers || input.headers || {}); + let output = ')'; - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } + output = expandRange(range, opts); + state.backtrack = true; + } - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } - get method() { - return this[INTERNALS$2].method; - } + /** + * Pipes + */ - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } - get headers() { - return this[INTERNALS$2].headers; - } + /** + * Commas + */ - get redirect() { - return this[INTERNALS$2].redirect; - } + if (value === ',') { + let output = value; - get signal() { - return this[INTERNALS$2].signal; - } + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} + push({ type: 'comma', value, output }); + continue; + } -Body.mixIn(Request.prototype); + /** + * Slashes + */ -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); + /** + * Dots + */ - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } + /** + * Question marks + */ - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); + } - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js + push({ type: 'text', value, output }); + continue; + } - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ + push({ type: 'qmark', value, output: QMARK }); + continue; + } -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); + /** + * Exclamation + */ - 
this.type = 'aborted'; - this.message = message; + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; + /** + * Plus + */ -const URL$1 = Url.URL || whatwgUrl.URL; + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } - return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); -}; + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } -/** - * isSameProtocol reports whether the two provided URLs use the same protocol. - * - * Both domains must already be in canonical form. - * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = function isSameProtocol(destination, original) { - const orig = new URL$1(original).protocol; - const dest = new URL$1(destination).protocol; + /** + * Plain text + */ - return orig === dest; -}; + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { + push({ type: 'text', value }); + continue; + } - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } + /** + * Plain text + */ - Body.Promise = fetch.Promise; + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; + push({ type: 'text', value }); + continue; + } - let response = null; + /** + * Stars + */ - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - destroyStream(request.body, error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } - if (signal && signal.aborted) { - abort(); - return; - } + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } - // send request - const req = send(options); - let reqTimeout; + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } - if (response && response.body) { - destroyStream(response.body, err); - } + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; - finalize(); - }); + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } - fixResponseChunkedTransferBadEnding(req, function (err) { - if (signal && signal.aborted) { - return; - } + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; - if (response && response.body) { - destroyStream(response.body, err); - } - }); + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; - /* c8 ignore next 18 */ - if (parseInt(process.version.substring(1)) < 14) { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. - req.on('socket', function (s) { - s.addListener('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = s.listenerCount('data') > 0; - - // if end happened before close but the socket didn't emit an error, do it now - if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', err); - } - }); - }); - } + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; - req.on('response', function (res) { - clearTimeout(reqTimeout); + state.output += prior.output + prev.output; + state.globstar = true; - const headers = createHeadersLenient(res.headers); + consume(value + advance()); - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + push({ type: 'slash', value: '/', output: '' }); + continue; + } - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + const token = { type: 'star', value, output: star }; - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; - // HTTP-network fetch step 12.1.1.4: handle content codings + } else { + state.output += nodot; + prev.output += nodot; + } - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. 
no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + push(token); + } - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - raw.on('end', function () { - // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. - if (!response) { - response = new Response(body, response_options); - resolve(response); - } - }); - return; - } + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } - writeToStream(req, request); - }); -} -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - let socket; + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; - request.on('socket', function (s) { - socket = s; - }); + for (const token of state.tokens) { + state.output += token.output != null ? 
token.output : token.value; - request.on('response', function (response) { - const headers = response.headers; - - if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { - response.once('close', function (hadError) { - // tests for socket presence, as in some situations the - // the 'socket' event is not triggered for the request - // (happens in deno), avoids `TypeError` - // if a data listener is still present we didn't end cleanly - const hasDataListener = socket && socket.listenerCount('data') > 0; - - if (hasDataListener && !hadError) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(err); - } - }); - } - }); -} + if (token.suffix) { + state.output += token.suffix; + } + } + } -function destroyStream(stream, err) { - if (stream.destroy) { - stream.destroy(err); - } else { - // node < 8 - stream.emit('error', err); - stream.end(); - } -} + return state; +}; /** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports["default"] = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; -exports.AbortError = AbortError; +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + input = REPLACEMENTS[input] || input; + const win32 = utils.isWindows(options); -/***/ }), + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(win32); -/***/ 7952: -/***/ ((module) => { + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; -"use strict"; + if (opts.capture) { + star = `(${star})`; + } + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; -// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs -const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain'; -const DATA_URL_DEFAULT_CHARSET = 'us-ascii'; + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; -const testParameter = (name, filters) => { - return filters.some(filter => filter instanceof RegExp ? 
filter.test(name) : filter === name); -}; + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; -const normalizeDataURL = (urlString, {stripHash}) => { - const match = /^data:(?[^,]*?),(?[^#]*?)(?:#(?.*))?$/.exec(urlString); + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - if (!match) { - throw new Error(`Invalid URL: ${urlString}`); - } + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; - let {type, data, hash} = match.groups; - const mediaType = type.split(';'); - hash = stripHash ? '' : hash; + case '**': + return nodot + globstar(opts); - let isBase64 = false; - if (mediaType[mediaType.length - 1] === 'base64') { - mediaType.pop(); - isBase64 = true; - } + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; - // Lowercase MIME type - const mimeType = (mediaType.shift() || '').toLowerCase(); - const attributes = mediaType - .map(attribute => { - let [key, value = ''] = attribute.split('=').map(string => string.trim()); + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - // Lowercase `charset` - if (key === 'charset') { - value = value.toLowerCase(); + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; - if (value === DATA_URL_DEFAULT_CHARSET) { - return ''; - } - } + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; - return `${key}${value ? `=${value}` : ''}`; - }) - .filter(Boolean); + const source = create(match[1]); + if (!source) return; - const normalizedMediaType = [ - ...attributes - ]; + return source + DOT_LITERAL + match[2]; + } + } + }; - if (isBase64) { - normalizedMediaType.push('base64'); - } + const output = utils.removePrefix(input, state); + let source = create(output); - if (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) { - normalizedMediaType.unshift(mimeType); - } + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } - return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ? 
`#${hash}` : ''}`; + return source; }; -const normalizeUrl = (urlString, options) => { - options = { - defaultProtocol: 'http:', - normalizeProtocol: true, - forceHttp: false, - forceHttps: false, - stripAuthentication: true, - stripHash: false, - stripTextFragment: true, - stripWWW: true, - removeQueryParameters: [/^utm_\w+/i], - removeTrailingSlash: true, - removeSingleSlash: true, - removeDirectoryIndex: false, - sortQueryParameters: true, - ...options - }; +module.exports = parse; - urlString = urlString.trim(); - // Data URL - if (/^data:/i.test(urlString)) { - return normalizeDataURL(urlString, options); - } +/***/ }), - if (/^view-source:/i.test(urlString)) { - throw new Error('`view-source:` is not supported as it is a non-standard protocol'); - } +/***/ 555: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const hasRelativeProtocol = urlString.startsWith('//'); - const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString); +"use strict"; - // Prepend protocol - if (!isRelativeUrl) { - urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol); - } - const urlObj = new URL(urlString); +const path = __nccwpck_require__(1017); +const scan = __nccwpck_require__(7751); +const parse = __nccwpck_require__(5961); +const utils = __nccwpck_require__(7426); +const constants = __nccwpck_require__(6476); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); - if (options.forceHttp && options.forceHttps) { - throw new Error('The `forceHttp` and `forceHttps` options cannot be used together'); - } +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. + * @api public + */ - if (options.forceHttp && urlObj.protocol === 'https:') { - urlObj.protocol = 'http:'; - } +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } - if (options.forceHttps && urlObj.protocol === 'http:') { - urlObj.protocol = 'https:'; - } + const isState = isObject(glob) && glob.tokens && glob.input; - // Remove auth - if (options.stripAuthentication) { - urlObj.username = ''; - urlObj.password = ''; - } + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } - // Remove hash - if (options.stripHash) { - urlObj.hash = ''; - } else if (options.stripTextFragment) { - urlObj.hash = urlObj.hash.replace(/#?:~:text.*?$/i, ''); - } + const opts = options || {}; + const posix = utils.isWindows(options); + const regex = isState + ? 
picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); - // Remove duplicate slashes if not preceded by a protocol - if (urlObj.pathname) { - urlObj.pathname = urlObj.pathname.replace(/(? false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } - // Remove directory index - if (options.removeDirectoryIndex === true) { - options.removeDirectoryIndex = [/^index\.[a-z]+$/]; - } + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; - if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) { - let pathComponents = urlObj.pathname.split('/'); - const lastComponent = pathComponents[pathComponents.length - 1]; + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } - if (testParameter(lastComponent, options.removeDirectoryIndex)) { - pathComponents = pathComponents.slice(0, pathComponents.length - 1); - urlObj.pathname = pathComponents.slice(1).join('/') + '/'; - } - } + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } - if (urlObj.hostname) { - // Remove trailing dot - urlObj.hostname = urlObj.hostname.replace(/\.$/, ''); + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } - // Remove `www.` - if (options.stripWWW && /^www\.(?!www\.)(?:[a-z\-\d]{1,63})\.(?:[a-z.\-\d]{2,63})$/.test(urlObj.hostname)) { - // Each label should be max 63 at length (min: 1). - // Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names - // Each TLD should be up to 63 characters long (min: 2). - // It is technically possible to have a single character TLD, but none currently exist. - urlObj.hostname = urlObj.hostname.replace(/^www\./, ''); - } - } + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; - // Remove query unwanted parameters - if (Array.isArray(options.removeQueryParameters)) { - for (const key of [...urlObj.searchParams.keys()]) { - if (testParameter(key, options.removeQueryParameters)) { - urlObj.searchParams.delete(key); - } - } - } + if (returnState) { + matcher.state = state; + } - if (options.removeQueryParameters === true) { - urlObj.search = ''; - } + return matcher; +}; - // Sort query parameters - if (options.sortQueryParameters) { - urlObj.searchParams.sort(); - } +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. 
+ * @api public + */ - if (options.removeTrailingSlash) { - urlObj.pathname = urlObj.pathname.replace(/\/$/, ''); - } +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } - const oldUrlString = urlString; + if (input === '') { + return { isMatch: false, output: '' }; + } - // Take advantage of many of the Node `url` normalizations - urlString = urlObj.toString(); + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; - if (!options.removeSingleSlash && urlObj.pathname === '/' && !oldUrlString.endsWith('/') && urlObj.hash === '') { - urlString = urlString.replace(/\/$/, ''); - } + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } - // Remove ending `/` unless removeSingleSlash is false - if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '' && options.removeSingleSlash) { - urlString = urlString.replace(/\/$/, ''); - } + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } - // Restore relative protocol, if applicable - if (hasRelativeProtocol && !options.normalizeProtocol) { - urlString = urlString.replace(/^http:\/\//, '//'); - } + return { isMatch: Boolean(match), match, output }; +}; - // Remove http/https - if (options.stripProtocol) { - urlString = urlString.replace(/^(?:https?:)?\/\//, ''); - } +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). + * @return {Boolean} + * @api public + */ - return urlString; +picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(path.basename(input)); }; -module.exports = normalizeUrl; - +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ -/***/ }), +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * Parse a glob pattern to create the source string for a regular + * expression. 
+ * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +picomatch.scan = (input, options) => scan(input, options); -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - -/***/ }), + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? 
'' : '$'; -/***/ 9072: -/***/ ((module) => { + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } -"use strict"; + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + return regex; +}; -class CancelError extends Error { - constructor(reason) { - super(reason || 'Promise was canceled'); - this.name = 'CancelError'; - } - - get isCanceled() { - return true; - } -} - -class PCancelable { - static fn(userFn) { - return (...arguments_) => { - return new PCancelable((resolve, reject, onCancel) => { - arguments_.push(onCancel); - // eslint-disable-next-line promise/prefer-await-to-then - userFn(...arguments_).then(resolve, reject); - }); - }; - } - - constructor(executor) { - this._cancelHandlers = []; - this._isPending = true; - this._isCanceled = false; - this._rejectOnCancel = true; - - this._promise = new Promise((resolve, reject) => { - this._reject = reject; - - const onResolve = value => { - if (!this._isCanceled || !onCancel.shouldReject) { - this._isPending = false; - resolve(value); - } - }; - - const onReject = error => { - this._isPending = false; - reject(error); - }; - - const onCancel = handler => { - if (!this._isPending) { - throw new Error('The `onCancel` handler was attached after the promise settled.'); - } - - this._cancelHandlers.push(handler); - }; - - Object.defineProperties(onCancel, { - shouldReject: { - get: () => this._rejectOnCancel, - set: boolean => { - this._rejectOnCancel = boolean; - } - } - }); +/** + * Create a regular expression from a parsed glob pattern. + * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ - return executor(onResolve, onReject, onCancel); - }); - } +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } - then(onFulfilled, onRejected) { - // eslint-disable-next-line promise/prefer-await-to-then - return this._promise.then(onFulfilled, onRejected); - } + let parsed = { negated: false, fastpaths: true }; - catch(onRejected) { - return this._promise.catch(onRejected); - } + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } - finally(onFinally) { - return this._promise.finally(onFinally); - } + if (!parsed.output) { + parsed = parse(input, options); + } - cancel(reason) { - if (!this._isPending || this._isCanceled) { - return; - } + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; - this._isCanceled = true; +/** + * Create a regular expression from the given regex source string. 
+ * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ - if (this._cancelHandlers.length > 0) { - try { - for (const handler of this._cancelHandlers) { - handler(); - } - } catch (error) { - this._reject(error); - return; - } - } +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; - if (this._rejectOnCancel) { - this._reject(new CancelError(reason)); - } - } +/** + * Picomatch constants. + * @return {Object} + */ - get isCanceled() { - return this._isCanceled; - } -} +picomatch.constants = constants; -Object.setPrototypeOf(PCancelable.prototype, Promise.prototype); +/** + * Expose "picomatch" + */ -module.exports = PCancelable; -module.exports.CancelError = CancelError; +module.exports = picomatch; /***/ }), -/***/ 8569: +/***/ 7751: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = __nccwpck_require__(3322); +const utils = __nccwpck_require__(7426); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = __nccwpck_require__(6476); +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; -/***/ }), +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; -/***/ 6099: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). + * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. 
+ * @api public + */ -"use strict"; +const scan = (input, options) => { + const opts = options || {}; + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; -const path = __nccwpck_require__(1017); -const WIN_SLASH = '\\\\/'; -const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; -/** - * Posix glob regex - */ + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; -const DOT_LITERAL = '\\.'; -const PLUS_LITERAL = '\\+'; -const QMARK_LITERAL = '\\?'; -const SLASH_LITERAL = '\\/'; -const ONE_CHAR = '(?=.)'; -const QMARK = '[^/]'; -const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; -const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; -const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; -const NO_DOT = `(?!${DOT_LITERAL})`; -const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; -const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; -const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; -const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; -const STAR = `${QMARK}*?`; + while (index < length) { + code = advance(); + let next; -const POSIX_CHARS = { - DOT_LITERAL, - PLUS_LITERAL, - QMARK_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - QMARK, - END_ANCHOR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK_NO_DOT, - STAR, - START_ANCHOR -}; + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); -/** - * Windows glob regex - */ + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } -const WINDOWS_CHARS = { - ...POSIX_CHARS, + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; - SLASH_LITERAL: `[${WIN_SLASH}]`, - QMARK: WIN_NO_SLASH, - STAR: `${WIN_NO_SLASH}*?`, - DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, - NO_DOT: `(?!${DOT_LITERAL})`, - NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, - NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - QMARK_NO_DOT: `[^.${WIN_SLASH}]`, - START_ANCHOR: `(?:^|[${WIN_SLASH}])`, - END_ANCHOR: `(?:[${WIN_SLASH}]|$)` -}; + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } -/** - * POSIX Bracket Regex - */ + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } -const POSIX_REGEX_SOURCE = { - alnum: 'a-zA-Z0-9', - alpha: 'a-zA-Z', - ascii: '\\x00-\\x7F', - blank: ' \\t', - cntrl: '\\x00-\\x1F\\x7F', - digit: '0-9', - graph: '\\x21-\\x7E', - lower: 'a-z', - print: '\\x20-\\x7E ', - punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', - space: ' \\t\\r\\n\\v\\f', - upper: 'A-Z', - word: 'A-Za-z0-9_', - xdigit: 'A-Fa-f0-9' -}; + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; -module.exports = { - MAX_LENGTH: 1024 * 
64, - POSIX_REGEX_SOURCE, + if (scanToEnd === true) { + continue; + } - // regular expressions - REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, - REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, - REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, - REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, - REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, - REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + break; + } - // Replace globs with equivalent patterns to reduce parsing time. - REPLACEMENTS: { - '***': '*', - '**/**': '**', - '**/**/**': '**' - }, + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; - // Digits - CHAR_0: 48, /* 0 */ - CHAR_9: 57, /* 9 */ + if (scanToEnd === true) { + continue; + } - // Alphabet chars. - CHAR_UPPERCASE_A: 65, /* A */ - CHAR_LOWERCASE_A: 97, /* a */ - CHAR_UPPERCASE_Z: 90, /* Z */ - CHAR_LOWERCASE_Z: 122, /* z */ + break; + } - CHAR_LEFT_PARENTHESES: 40, /* ( */ - CHAR_RIGHT_PARENTHESES: 41, /* ) */ + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; - CHAR_ASTERISK: 42, /* * */ + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } - // Non-alphabetic chars. - CHAR_AMPERSAND: 38, /* & */ - CHAR_AT: 64, /* @ */ - CHAR_BACKWARD_SLASH: 92, /* \ */ - CHAR_CARRIAGE_RETURN: 13, /* \r */ - CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ - CHAR_COLON: 58, /* : */ - CHAR_COMMA: 44, /* , */ - CHAR_DOT: 46, /* . */ - CHAR_DOUBLE_QUOTE: 34, /* " */ - CHAR_EQUAL: 61, /* = */ - CHAR_EXCLAMATION_MARK: 33, /* ! */ - CHAR_FORM_FEED: 12, /* \f */ - CHAR_FORWARD_SLASH: 47, /* / */ - CHAR_GRAVE_ACCENT: 96, /* ` */ - CHAR_HASH: 35, /* # */ - CHAR_HYPHEN_MINUS: 45, /* - */ - CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ - CHAR_LEFT_CURLY_BRACE: 123, /* { */ - CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ - CHAR_LINE_FEED: 10, /* \n */ - CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ - CHAR_PERCENT: 37, /* % */ - CHAR_PLUS: 43, /* + */ - CHAR_QUESTION_MARK: 63, /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ - CHAR_RIGHT_CURLY_BRACE: 125, /* } */ - CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ - CHAR_SEMICOLON: 59, /* ; */ - CHAR_SINGLE_QUOTE: 39, /* ' */ - CHAR_SPACE: 32, /* */ - CHAR_TAB: 9, /* \t */ - CHAR_UNDERSCORE: 95, /* _ */ - CHAR_VERTICAL_LINE: 124, /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + if (scanToEnd === true) { + continue; + } - SEP: path.sep, + break; + } - /** - * Create EXTGLOB_CHARS - */ + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; - extglobChars(chars) { - return { - '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, - '?': { type: 'qmark', open: '(?:', close: ')?' }, - '+': { type: 'plus', open: '(?:', close: ')+' }, - '*': { type: 'star', open: '(?:', close: ')*' }, - '@': { type: 'at', open: '(?:', close: ')' } - }; - }, + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } - /** - * Create GLOB_CHARS - */ + lastIndex = index + 1; + continue; + } - globChars(win32) { - return win32 === true ? 
WINDOWS_CHARS : POSIX_CHARS; - } -}; + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } -/***/ }), + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } -/***/ 2139: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } -"use strict"; + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + if (scanToEnd === true) { + continue; + } + break; + } -const constants = __nccwpck_require__(6099); -const utils = __nccwpck_require__(479); + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; -/** - * Constants - */ - -const { - MAX_LENGTH, - POSIX_REGEX_SOURCE, - REGEX_NON_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_BACKREF, - REPLACEMENTS -} = constants; - -/** - * Helpers - */ + if (scanToEnd === true) { + continue; + } + break; + } -const expandRange = (args, options) => { - if (typeof options.expandRange === 'function') { - return options.expandRange(...args, options); - } + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } - args.sort(); - const value = `[${args.join('-')}]`; + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } - try { - /* eslint-disable-next-line no-new */ - new RegExp(value); - } catch (ex) { - return args.map(v => utils.escapeRegex(v)).join('..'); - } + if (scanToEnd === true) { + continue; + } - return value; -}; + break; + } -/** - * Create the message for a syntax error - */ + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } -const syntaxError = (type, char) => { - return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; -}; + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; -/** - * Parse the given input string. - * @param {String} input - * @param {Object} options - * @return {Object} - */ + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } -const parse = (input, options) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } - input = REPLACEMENTS[input] || input; + if (isGlob === true) { + finished = true; - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (scanToEnd === true) { + continue; + } - let len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + break; + } } - const bos = { type: 'bos', value: '', output: opts.prepend || '' }; - const tokens = [bos]; - - const capture = opts.capture ? '' : '?:'; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const PLATFORM_CHARS = constants.globChars(win32); - const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } - const { - DOT_LITERAL, - PLUS_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK, - QMARK_NO_DOT, - STAR, - START_ANCHOR - } = PLATFORM_CHARS; + let base = str; + let prefix = ''; + let glob = ''; - const globstar = opts => { - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } - const nodot = opts.dot ? '' : NO_DOT; - const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; - let star = opts.bash === true ? globstar(opts) : STAR; + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } - if (opts.capture) { - star = `(${star})`; + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } } - // minimatch options support - if (typeof opts.noext === 'boolean') { - opts.noextglob = opts.noext; + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } } const state = { + prefix, input, - index: -1, - start: 0, - dot: opts.dot === true, - consumed: '', - output: '', - prefix: '', - backtrack: false, - negated: false, - brackets: 0, - braces: 0, - parens: 0, - quotes: 0, - globstar: false, - tokens + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob }; - input = utils.removePrefix(input, state); - len = input.length; - - const extglobs = []; - const braces = []; - const stack = []; - let prev = bos; - let value; - - /** - * Tokenizing helpers - */ + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } - const eos = () => state.index === len - 1; - const peek = state.peek = (n = 1) => input[state.index + n]; - const advance = state.advance = () => input[++state.index] || ''; - const remaining = () => input.slice(state.index + 1); - const consume = (value = '', num = 0) => { - state.consumed += value; - state.index += num; - }; + if (opts.parts === true || opts.tokens === true) { + let prevIndex; - const append = token => { - state.output += token.output != null ? token.output : token.value; - consume(token.value); - }; + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } - const negate = () => { - let count = 1; + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); - while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { - advance(); - state.start++; - count++; + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } } - if (count % 2 === 0) { - return false; - } + state.slashes = slashes; + state.parts = parts; + } - state.negated = true; - state.start++; - return true; - }; + return state; +}; - const increment = type => { - state[type]++; - stack.push(type); - }; +module.exports = scan; - const decrement = type => { - state[type]--; - stack.pop(); - }; - /** - * Push tokens onto the tokens array. This helper speeds up - * tokenizing by 1) helping us avoid backtracking as much as possible, - * and 2) helping us avoid creating extra tokens when consecutive - * characters are plain text. This improves performance and simplifies - * lookbehinds. - */ +/***/ }), - const push = tok => { - if (prev.type === 'globstar') { - const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); - const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); +/***/ 7426: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { - state.output = state.output.slice(0, -prev.output.length); - prev.type = 'star'; - prev.value = '*'; - prev.output = star; - state.output += prev.output; - } - } +"use strict"; - if (extglobs.length && tok.type !== 'paren') { - extglobs[extglobs.length - 1].inner += tok.value; - } - if (tok.value || tok.output) append(tok); - if (prev && prev.type === 'text' && tok.type === 'text') { - prev.value += tok.value; - prev.output = (prev.output || '') + tok.value; - return; - } +const path = __nccwpck_require__(1017); +const win32 = process.platform === 'win32'; +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = __nccwpck_require__(6476); - tok.prev = prev; - tokens.push(tok); - prev = tok; - }; +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); - const extglobOpen = (type, value) => { - const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; - token.prev = prev; - token.parens = state.parens; - token.output = state.output; - const output = (opts.capture ? 
'(' : '') + token.open; +exports.supportsLookbehinds = () => { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + return false; +}; - increment('parens'); - push({ type, value, output: state.output ? '' : ONE_CHAR }); - push({ type: 'paren', extglob: true, value: advance(), output }); - extglobs.push(token); - }; +exports.isWindows = options => { + if (options && typeof options.windows === 'boolean') { + return options.windows; + } + return win32 === true || path.sep === '\\'; +}; - const extglobClose = token => { - let output = token.close + (opts.capture ? ')' : ''); - let rest; +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; - if (token.type === 'negate') { - let extglobStar = star; +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; - if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { - extglobStar = globstar(opts); - } +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? '' : '$'; - if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { - output = token.close = `)$))${extglobStar}`; - } + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; - if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { - // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. - // In this case, we need to parse the string and use it in the output of the original pattern. - // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. - // - // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. - const expression = parse(rest, { ...options, fastpaths: false }).output; - output = token.close = `)${expression})${extglobStar})`; - } +/***/ }), - if (token.prev.type === 'bos') { - state.negatedExtglob = true; - } - } +/***/ 2610: +/***/ ((module) => { - push({ type: 'paren', extglob: true, value, output }); - decrement('parens'); - }; +"use strict"; - /** - * Fast paths - */ - if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { - let backslashes = false; +// We define these manually to ensure they're always copied +// even if they would move up the prototype chain +// https://nodejs.org/api/http.html#http_class_http_incomingmessage +const knownProps = [ + 'destroy', + 'setTimeout', + 'socket', + 'headers', + 'trailers', + 'rawHeaders', + 'statusCode', + 'httpVersion', + 'httpVersionMinor', + 'httpVersionMajor', + 'rawTrailers', + 'statusMessage' +]; - let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { - if (first === '\\') { - backslashes = true; - return m; - } +module.exports = (fromStream, toStream) => { + const fromProps = new Set(Object.keys(fromStream).concat(knownProps)); - if (first === '?') { - if (esc) { - return esc + first + (rest ? 
QMARK.repeat(rest.length) : ''); - } - if (index === 0) { - return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); - } - return QMARK.repeat(chars.length); - } + for (const prop of fromProps) { + // Don't overwrite existing properties + if (prop in toStream) { + continue; + } - if (first === '.') { - return DOT_LITERAL.repeat(chars.length); - } + toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop]; + } +}; - if (first === '*') { - if (esc) { - return esc + first + (rest ? star : ''); - } - return star; - } - return esc ? m : `\\${m}`; - }); - if (backslashes === true) { - if (opts.unescape === true) { - output = output.replace(/\\/g, ''); - } else { - output = output.replace(/\\+/g, m => { - return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : ''); - }); - } - } +/***/ }), - if (output === input && opts.contains === true) { - state.output = input; - return state; - } +/***/ 7952: +/***/ ((module) => { - state.output = utils.wrapOutput(output, state, options); - return state; - } +"use strict"; - /** - * Tokenize input until we reach end-of-string - */ - while (!eos()) { - value = advance(); +// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs +const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain'; +const DATA_URL_DEFAULT_CHARSET = 'us-ascii'; - if (value === '\u0000') { - continue; - } +const testParameter = (name, filters) => { + return filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name); +}; - /** - * Escaped characters - */ +const normalizeDataURL = (urlString, {stripHash}) => { + const match = /^data:(?[^,]*?),(?[^#]*?)(?:#(?.*))?$/.exec(urlString); - if (value === '\\') { - const next = peek(); + if (!match) { + throw new Error(`Invalid URL: ${urlString}`); + } - if (next === '/' && opts.bash !== true) { - continue; - } + let {type, data, hash} = match.groups; + const mediaType = type.split(';'); + hash = stripHash ? '' : hash; - if (next === '.' || next === ';') { - continue; - } + let isBase64 = false; + if (mediaType[mediaType.length - 1] === 'base64') { + mediaType.pop(); + isBase64 = true; + } - if (!next) { - value += '\\'; - push({ type: 'text', value }); - continue; - } + // Lowercase MIME type + const mimeType = (mediaType.shift() || '').toLowerCase(); + const attributes = mediaType + .map(attribute => { + let [key, value = ''] = attribute.split('=').map(string => string.trim()); - // collapse slashes to reduce potential for exploits - const match = /^\\+/.exec(remaining()); - let slashes = 0; + // Lowercase `charset` + if (key === 'charset') { + value = value.toLowerCase(); - if (match && match[0].length > 2) { - slashes = match[0].length; - state.index += slashes; - if (slashes % 2 !== 0) { - value += '\\'; - } - } + if (value === DATA_URL_DEFAULT_CHARSET) { + return ''; + } + } - if (opts.unescape === true) { - value = advance(); - } else { - value += advance(); - } + return `${key}${value ? `=${value}` : ''}`; + }) + .filter(Boolean); - if (state.brackets === 0) { - push({ type: 'text', value }); - continue; - } - } + const normalizedMediaType = [ + ...attributes + ]; - /** - * If we're inside a regex character class, continue - * until we reach the closing bracket. 
- */ + if (isBase64) { + normalizedMediaType.push('base64'); + } - if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { - if (opts.posix !== false && value === ':') { - const inner = prev.value.slice(1); - if (inner.includes('[')) { - prev.posix = true; + if (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) { + normalizedMediaType.unshift(mimeType); + } - if (inner.includes(':')) { - const idx = prev.value.lastIndexOf('['); - const pre = prev.value.slice(0, idx); - const rest = prev.value.slice(idx + 2); - const posix = POSIX_REGEX_SOURCE[rest]; - if (posix) { - prev.value = pre + posix; - state.backtrack = true; - advance(); + return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ? `#${hash}` : ''}`; +}; - if (!bos.output && tokens.indexOf(prev) === 1) { - bos.output = ONE_CHAR; - } - continue; - } - } - } - } +const normalizeUrl = (urlString, options) => { + options = { + defaultProtocol: 'http:', + normalizeProtocol: true, + forceHttp: false, + forceHttps: false, + stripAuthentication: true, + stripHash: false, + stripTextFragment: true, + stripWWW: true, + removeQueryParameters: [/^utm_\w+/i], + removeTrailingSlash: true, + removeSingleSlash: true, + removeDirectoryIndex: false, + sortQueryParameters: true, + ...options + }; - if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { - value = `\\${value}`; - } + urlString = urlString.trim(); - if (value === ']' && (prev.value === '[' || prev.value === '[^')) { - value = `\\${value}`; - } + // Data URL + if (/^data:/i.test(urlString)) { + return normalizeDataURL(urlString, options); + } - if (opts.posix === true && value === '!' && prev.value === '[') { - value = '^'; - } + if (/^view-source:/i.test(urlString)) { + throw new Error('`view-source:` is not supported as it is a non-standard protocol'); + } - prev.value += value; - append({ value }); - continue; - } + const hasRelativeProtocol = urlString.startsWith('//'); + const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString); - /** - * If we're inside a quoted string, continue - * until we reach the closing double quote. - */ + // Prepend protocol + if (!isRelativeUrl) { + urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol); + } - if (state.quotes === 1 && value !== '"') { - value = utils.escapeRegex(value); - prev.value += value; - append({ value }); - continue; - } + const urlObj = new URL(urlString); - /** - * Double quotes - */ + if (options.forceHttp && options.forceHttps) { + throw new Error('The `forceHttp` and `forceHttps` options cannot be used together'); + } - if (value === '"') { - state.quotes = state.quotes === 1 ? 
0 : 1; - if (opts.keepQuotes === true) { - push({ type: 'text', value }); - } - continue; - } + if (options.forceHttp && urlObj.protocol === 'https:') { + urlObj.protocol = 'http:'; + } - /** - * Parentheses - */ + if (options.forceHttps && urlObj.protocol === 'http:') { + urlObj.protocol = 'https:'; + } - if (value === '(') { - increment('parens'); - push({ type: 'paren', value }); - continue; - } + // Remove auth + if (options.stripAuthentication) { + urlObj.username = ''; + urlObj.password = ''; + } - if (value === ')') { - if (state.parens === 0 && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '(')); - } + // Remove hash + if (options.stripHash) { + urlObj.hash = ''; + } else if (options.stripTextFragment) { + urlObj.hash = urlObj.hash.replace(/#?:~:text.*?$/i, ''); + } - const extglob = extglobs[extglobs.length - 1]; - if (extglob && state.parens === extglob.parens + 1) { - extglobClose(extglobs.pop()); - continue; - } + // Remove duplicate slashes if not preceded by a protocol + if (urlObj.pathname) { + urlObj.pathname = urlObj.pathname.replace(/(? 0) { + let pathComponents = urlObj.pathname.split('/'); + const lastComponent = pathComponents[pathComponents.length - 1]; - value = `\\${value}`; - } else { - increment('brackets'); - } + if (testParameter(lastComponent, options.removeDirectoryIndex)) { + pathComponents = pathComponents.slice(0, pathComponents.length - 1); + urlObj.pathname = pathComponents.slice(1).join('/') + '/'; + } + } - push({ type: 'bracket', value }); - continue; - } + if (urlObj.hostname) { + // Remove trailing dot + urlObj.hostname = urlObj.hostname.replace(/\.$/, ''); - if (value === ']') { - if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { - push({ type: 'text', value, output: `\\${value}` }); - continue; - } + // Remove `www.` + if (options.stripWWW && /^www\.(?!www\.)(?:[a-z\-\d]{1,63})\.(?:[a-z.\-\d]{2,63})$/.test(urlObj.hostname)) { + // Each label should be max 63 at length (min: 1). + // Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names + // Each TLD should be up to 63 characters long (min: 2). + // It is technically possible to have a single character TLD, but none currently exist. 
+ urlObj.hostname = urlObj.hostname.replace(/^www\./, ''); + } + } - if (state.brackets === 0) { - if (opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '[')); - } + // Remove query unwanted parameters + if (Array.isArray(options.removeQueryParameters)) { + for (const key of [...urlObj.searchParams.keys()]) { + if (testParameter(key, options.removeQueryParameters)) { + urlObj.searchParams.delete(key); + } + } + } - push({ type: 'text', value, output: `\\${value}` }); - continue; - } + if (options.removeQueryParameters === true) { + urlObj.search = ''; + } - decrement('brackets'); + // Sort query parameters + if (options.sortQueryParameters) { + urlObj.searchParams.sort(); + } - const prevValue = prev.value.slice(1); - if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { - value = `/${value}`; - } + if (options.removeTrailingSlash) { + urlObj.pathname = urlObj.pathname.replace(/\/$/, ''); + } - prev.value += value; - append({ value }); + const oldUrlString = urlString; - // when literal brackets are explicitly disabled - // assume we should match with a regex character class - if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { - continue; - } + // Take advantage of many of the Node `url` normalizations + urlString = urlObj.toString(); - const escaped = utils.escapeRegex(prev.value); - state.output = state.output.slice(0, -prev.value.length); + if (!options.removeSingleSlash && urlObj.pathname === '/' && !oldUrlString.endsWith('/') && urlObj.hash === '') { + urlString = urlString.replace(/\/$/, ''); + } - // when literal brackets are explicitly enabled - // assume we should escape the brackets to match literal characters - if (opts.literalBrackets === true) { - state.output += escaped; - prev.value = escaped; - continue; - } + // Remove ending `/` unless removeSingleSlash is false + if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '' && options.removeSingleSlash) { + urlString = urlString.replace(/\/$/, ''); + } - // when the user specifies nothing, try to match both - prev.value = `(${capture}${escaped}|${prev.value})`; - state.output += prev.value; - continue; - } + // Restore relative protocol, if applicable + if (hasRelativeProtocol && !options.normalizeProtocol) { + urlString = urlString.replace(/^http:\/\//, '//'); + } - /** - * Braces - */ + // Remove http/https + if (options.stripProtocol) { + urlString = urlString.replace(/^(?:https?:)?\/\//, ''); + } - if (value === '{' && opts.nobrace !== true) { - increment('braces'); + return urlString; +}; - const open = { - type: 'brace', - value, - output: '(', - outputIndex: state.output.length, - tokensIndex: state.tokens.length - }; +module.exports = normalizeUrl; - braces.push(open); - push(open); - continue; - } - if (value === '}') { - const brace = braces[braces.length - 1]; +/***/ }), - if (opts.nobrace === true || !brace) { - push({ type: 'text', value, output: value }); - continue; - } +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - let output = ')'; +var wrappy = __nccwpck_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) - if (brace.dots === true) { - const arr = tokens.slice(); - const range = []; +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) - for (let i = arr.length - 1; i >= 0; i--) { - tokens.pop(); - if (arr[i].type === 'brace') { 
- break; - } - if (arr[i].type !== 'dots') { - range.unshift(arr[i].value); - } - } + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) - output = expandRange(range, opts); - state.backtrack = true; - } +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} - if (brace.comma !== true && brace.dots !== true) { - const out = state.output.slice(0, brace.outputIndex); - const toks = state.tokens.slice(brace.tokensIndex); - brace.value = brace.output = '\\{'; - value = output = '\\}'; - state.output = out; - for (const t of toks) { - state.output += (t.output || t.value); - } - } +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} - push({ type: 'brace', value, output }); - decrement('braces'); - braces.pop(); - continue; - } - /** - * Pipes - */ +/***/ }), - if (value === '|') { - if (extglobs.length > 0) { - extglobs[extglobs.length - 1].conditions++; - } - push({ type: 'text', value }); - continue; - } +/***/ 9072: +/***/ ((module) => { - /** - * Commas - */ +"use strict"; - if (value === ',') { - let output = value; - const brace = braces[braces.length - 1]; - if (brace && stack[stack.length - 1] === 'braces') { - brace.comma = true; - output = '|'; - } +class CancelError extends Error { + constructor(reason) { + super(reason || 'Promise was canceled'); + this.name = 'CancelError'; + } - push({ type: 'comma', value, output }); - continue; - } + get isCanceled() { + return true; + } +} - /** - * Slashes - */ +class PCancelable { + static fn(userFn) { + return (...arguments_) => { + return new PCancelable((resolve, reject, onCancel) => { + arguments_.push(onCancel); + // eslint-disable-next-line promise/prefer-await-to-then + userFn(...arguments_).then(resolve, reject); + }); + }; + } - if (value === '/') { - // if the beginning of the glob is "./", advance the start - // to the current index, and don't add the "./" characters - // to the state. This greatly simplifies lookbehinds when - // checking for BOS characters like "!" and "." 
(not "./") - if (prev.type === 'dot' && state.index === state.start + 1) { - state.start = state.index + 1; - state.consumed = ''; - state.output = ''; - tokens.pop(); - prev = bos; // reset "prev" to the first token - continue; - } + constructor(executor) { + this._cancelHandlers = []; + this._isPending = true; + this._isCanceled = false; + this._rejectOnCancel = true; - push({ type: 'slash', value, output: SLASH_LITERAL }); - continue; - } + this._promise = new Promise((resolve, reject) => { + this._reject = reject; - /** - * Dots - */ + const onResolve = value => { + if (!this._isCanceled || !onCancel.shouldReject) { + this._isPending = false; + resolve(value); + } + }; - if (value === '.') { - if (state.braces > 0 && prev.type === 'dot') { - if (prev.value === '.') prev.output = DOT_LITERAL; - const brace = braces[braces.length - 1]; - prev.type = 'dots'; - prev.output += value; - prev.value += value; - brace.dots = true; - continue; - } + const onReject = error => { + this._isPending = false; + reject(error); + }; - if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { - push({ type: 'text', value, output: DOT_LITERAL }); - continue; - } + const onCancel = handler => { + if (!this._isPending) { + throw new Error('The `onCancel` handler was attached after the promise settled.'); + } - push({ type: 'dot', value, output: DOT_LITERAL }); - continue; - } + this._cancelHandlers.push(handler); + }; - /** - * Question marks - */ + Object.defineProperties(onCancel, { + shouldReject: { + get: () => this._rejectOnCancel, + set: boolean => { + this._rejectOnCancel = boolean; + } + } + }); - if (value === '?') { - const isGroup = prev && prev.value === '('; - if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('qmark', value); - continue; - } + return executor(onResolve, onReject, onCancel); + }); + } - if (prev && prev.type === 'paren') { - const next = peek(); - let output = value; + then(onFulfilled, onRejected) { + // eslint-disable-next-line promise/prefer-await-to-then + return this._promise.then(onFulfilled, onRejected); + } - if (next === '<' && !utils.supportsLookbehinds()) { - throw new Error('Node.js v10 or higher is required for regex lookbehinds'); - } + catch(onRejected) { + return this._promise.catch(onRejected); + } - if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { - output = `\\${value}`; - } + finally(onFinally) { + return this._promise.finally(onFinally); + } - push({ type: 'text', value, output }); - continue; - } + cancel(reason) { + if (!this._isPending || this._isCanceled) { + return; + } - if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { - push({ type: 'qmark', value, output: QMARK_NO_DOT }); - continue; - } + this._isCanceled = true; - push({ type: 'qmark', value, output: QMARK }); - continue; - } + if (this._cancelHandlers.length > 0) { + try { + for (const handler of this._cancelHandlers) { + handler(); + } + } catch (error) { + this._reject(error); + return; + } + } - /** - * Exclamation - */ + if (this._rejectOnCancel) { + this._reject(new CancelError(reason)); + } + } - if (value === '!') { - if (opts.noextglob !== true && peek() === '(') { - if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { - extglobOpen('negate', value); - continue; - } - } + get isCanceled() { + return this._isCanceled; + } +} - if (opts.nonegate !== true && state.index === 0) { - negate(); - continue; - } - } +Object.setPrototypeOf(PCancelable.prototype, Promise.prototype); - /** - * Plus - */ +module.exports = PCancelable; +module.exports.CancelError = CancelError; - if (value === '+') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('plus', value); - continue; - } - if ((prev && prev.value === '(') || opts.regex === false) { - push({ type: 'plus', value, output: PLUS_LITERAL }); - continue; - } +/***/ }), - if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { - push({ type: 'plus', value }); - continue; - } +/***/ 8569: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - push({ type: 'plus', value: PLUS_LITERAL }); - continue; - } +"use strict"; - /** - * Plain text - */ - if (value === '@') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - push({ type: 'at', extglob: true, value, output: '' }); - continue; - } +const os = __nccwpck_require__(2037); +const pico = __nccwpck_require__(3322); - push({ type: 'text', value }); - continue; - } +const isWindows = os.platform() === 'win32'; - /** - * Plain text - */ +function picomatch(glob, options, returnState = false) { + // default to os.platform() + if (options && (options.windows === null || options.windows === undefined)) { + // don't mutate the original options object + options = { ...options, windows: isWindows }; + } + return pico(glob, options, returnState); +} - if (value !== '*') { - if (value === '$' || value === '^') { - value = `\\${value}`; - } +module.exports = picomatch; +// public api +module.exports.test = pico.test; +module.exports.matchBase = pico.matchBase; +module.exports.isMatch = pico.isMatch; +module.exports.parse = pico.parse; +module.exports.scan = pico.scan; +module.exports.compileRe = pico.compileRe; +module.exports.toRegex = pico.toRegex; +// for tests +module.exports.makeRe = pico.makeRe; - const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); - if (match) { - value += match[0]; - state.index += match[0].length; - } - push({ type: 'text', value }); - continue; - } +/***/ }), - /** - * Stars - */ +/***/ 6099: +/***/ ((module) => { - if (prev && (prev.type === 'globstar' || prev.star === true)) { - prev.type = 'star'; - prev.star = true; - prev.value += value; - prev.output = star; - state.backtrack = true; - state.globstar = true; - consume(value); - continue; - } +"use strict"; - let rest = remaining(); - if (opts.noextglob !== true && /^\([^?]/.test(rest)) { - extglobOpen('star', value); - continue; - } - if (prev.type === 'star') { - if (opts.noglobstar === true) { - consume(value); - continue; - } +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; - const prior = prev.prev; - const before = prior.prev; - const isStart = prior.type === 'slash' || prior.type === 'bos'; - const afterStar = before && (before.type === 'star' || before.type === 'globstar'); +/** + * Posix glob regex + */ - if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { - push({ type: 'star', value, output: '' }); - continue; - } +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const 
START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; +const SEP = '/'; - const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); - const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); - if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { - push({ type: 'star', value, output: '' }); - continue; - } +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR, + SEP +}; - // strip consecutive `/**/` - while (rest.slice(0, 3) === '/**') { - const after = input[state.index + 4]; - if (after && after !== '/') { - break; - } - rest = rest.slice(3); - consume('/**', 3); - } +/** + * Windows glob regex + */ - if (prior.type === 'bos' && eos()) { - prev.type = 'globstar'; - prev.value += value; - prev.output = globstar(opts); - state.output = prev.output; - state.globstar = true; - consume(value); - continue; - } +const WINDOWS_CHARS = { + ...POSIX_CHARS, - if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)`, + SEP: '\\' +}; - prev.type = 'globstar'; - prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); - prev.value += value; - state.globstar = true; - state.output += prior.output + prev.output; - consume(value); - continue; - } +/** + * POSIX Bracket Regex + */ - if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { - const end = rest[1] !== void 0 ? 
'|$' : ''; +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, - prev.type = 'globstar'; - prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; - prev.value += value; + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, - state.output += prior.output + prev.output; - state.globstar = true; + // Replace globs with equivalent patterns to reduce parsing time. + REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, - consume(value + advance()); + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ - push({ type: 'slash', value: '/', output: '' }); - continue; - } + // Alphabet chars. + CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ - if (prior.type === 'bos' && rest[0] === '/') { - prev.type = 'globstar'; - prev.value += value; - prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; - state.output = prev.output; - state.globstar = true; - consume(value + advance()); - push({ type: 'slash', value: '/', output: '' }); - continue; - } + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ - // remove single star from output - state.output = state.output.slice(0, -prev.output.length); + CHAR_ASTERISK: 42, /* * */ - // reset previous token to globstar - prev.type = 'globstar'; - prev.output = globstar(opts); - prev.value += value; + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? 
*/ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ - // reset output with globstar - state.output += prev.output; - state.globstar = true; - consume(value); - continue; - } + /** + * Create EXTGLOB_CHARS + */ - const token = { type: 'star', value, output: star }; + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, - if (opts.bash === true) { - token.output = '.*?'; - if (prev.type === 'bos' || prev.type === 'slash') { - token.output = nodot + token.output; - } - push(token); - continue; - } + /** + * Create GLOB_CHARS + */ - if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { - token.output = value; - push(token); - continue; - } + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; - if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { - if (prev.type === 'dot') { - state.output += NO_DOT_SLASH; - prev.output += NO_DOT_SLASH; - } else if (opts.dot === true) { - state.output += NO_DOTS_SLASH; - prev.output += NO_DOTS_SLASH; +/***/ }), - } else { - state.output += nodot; - prev.output += nodot; - } +/***/ 2139: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (peek() !== '*') { - state.output += ONE_CHAR; - prev.output += ONE_CHAR; - } - } +"use strict"; - push(token); - } - while (state.brackets > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); - state.output = utils.escapeLast(state.output, '['); - decrement('brackets'); - } +const constants = __nccwpck_require__(6099); +const utils = __nccwpck_require__(479); - while (state.parens > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); - state.output = utils.escapeLast(state.output, '('); - decrement('parens'); - } +/** + * Constants + */ - while (state.braces > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); - state.output = utils.escapeLast(state.output, '{'); - decrement('braces'); - } +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; - if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { - push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); - } +/** + * Helpers + */ - // rebuild the output if we had to backtrack at any point - if (state.backtrack === true) { - state.output = ''; +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } - for (const token of state.tokens) { - state.output += token.output != null ? 
token.output : token.value; + args.sort(); + const value = `[${args.join('-')}]`; - if (token.suffix) { - state.output += token.suffix; - } - } + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); } - return state; + return value; }; /** - * Fast paths for creating regular expressions for common glob patterns. - * This can significantly speed up processing and has very little downside - * impact when none of the fast paths match. + * Create the message for a syntax error */ -parse.fastpaths = (input, options) => { - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - const len = input.length; +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. + * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; if (len > max) { throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); } - input = REPLACEMENTS[input] || input; - const win32 = utils.isWindows(options); + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? '' : '?:'; // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(opts.windows); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + const { DOT_LITERAL, + PLUS_LITERAL, SLASH_LITERAL, ONE_CHAR, DOTS_SLASH, NO_DOT, - NO_DOTS, + NO_DOT_SLASH, NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, STAR, START_ANCHOR - } = constants.globChars(win32); + } = PLATFORM_CHARS; - const nodot = opts.dot ? NO_DOTS : NO_DOT; - const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; - const capture = opts.capture ? '' : '?:'; - const state = { negated: false, prefix: '' }; - let star = opts.bash === true ? '.*?' : STAR; + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; if (opts.capture) { star = `(${star})`; } - const globstar = opts => { - if (opts.noglobstar === true) return star; - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? 
DOTS_SLASH : DOT_LITERAL}).)*?)`; + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens }; - const create = str => { - switch (str) { - case '*': - return `${nodot}${ONE_CHAR}${star}`; + input = utils.removePrefix(input, state); + len = input.length; - case '.*': - return `${DOT_LITERAL}${ONE_CHAR}${star}`; + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; - case '*.*': - return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + /** + * Tokenizing helpers + */ - case '*/*': - return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; - case '**': - return nodot + globstar(opts); + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; - case '**/*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + const negate = () => { + let count = 1; - case '**/*.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } - case '**/.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + if (count % 2 === 0) { + return false; + } - default: { - const match = /^(.*?)\.(\w+)$/.exec(str); - if (!match) return; + state.negated = true; + state.start++; + return true; + }; - const source = create(match[1]); - if (!source) return; + const increment = type => { + state[type]++; + stack.push(type); + }; - return source + DOT_LITERAL + match[2]; + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. 
+ */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; } } - }; - const output = utils.removePrefix(input, state); - let source = create(output); + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } - if (source && opts.strictSlashes !== true) { - source += `${SLASH_LITERAL}?`; - } + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } - return source; -}; + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; -module.exports = parse; + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; -/***/ }), + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; -/***/ 3322: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; -"use strict"; + if (token.type === 'negate') { + let extglobStar = star; + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } -const path = __nccwpck_require__(1017); -const scan = __nccwpck_require__(2429); -const parse = __nccwpck_require__(2139); -const utils = __nccwpck_require__(479); -const constants = __nccwpck_require__(6099); -const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } -/** - * Creates a matcher function from one or more glob patterns. The - * returned function takes a string to match as its first argument, - * and returns true if the string is a match. The returned matcher - * function also takes a boolean as the second argument that, when true, - * returns an object with additional information. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch(glob[, options]); - * - * const isMatch = picomatch('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @name picomatch - * @param {String|Array} `globs` One or more glob patterns. - * @param {Object=} `options` - * @return {Function=} Returns a matcher function. - * @api public - */ + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. 
+ // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; -const picomatch = (glob, options, returnState = false) => { - if (Array.isArray(glob)) { - const fns = glob.map(input => picomatch(input, options, returnState)); - const arrayMatcher = str => { - for (const isMatch of fns) { - const state = isMatch(str); - if (state) return state; + output = token.close = `)${expression})${extglobStar})`; } - return false; - }; - return arrayMatcher; - } - const isState = isObject(glob) && glob.tokens && glob.input; + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } - if (glob === '' || (typeof glob !== 'string' && !isState)) { - throw new TypeError('Expected pattern to be a non-empty string'); - } + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; - const opts = options || {}; - const posix = utils.isWindows(options); - const regex = isState - ? picomatch.compileRe(glob, options) - : picomatch.makeRe(glob, options, false, true); + /** + * Fast paths + */ - const state = regex.state; - delete regex.state; + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; - let isIgnored = () => false; - if (opts.ignore) { - const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; - isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); - } + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } - const matcher = (input, returnObject = false) => { - const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); - const result = { glob, state, regex, posix, input, output, match, isMatch }; + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } - if (typeof opts.onResult === 'function') { - opts.onResult(result); - } + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } - if (isMatch === false) { - result.isMatch = false; - return returnObject ? result : false; - } + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); - if (isIgnored(input)) { - if (typeof opts.onIgnore === 'function') { - opts.onIgnore(result); + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : ''); + }); } - result.isMatch = false; - return returnObject ? result : false; } - if (typeof opts.onMatch === 'function') { - opts.onMatch(result); + if (output === input && opts.contains === true) { + state.output = input; + return state; } - return returnObject ? result : true; - }; - if (returnState) { - matcher.state = state; + state.output = utils.wrapOutput(output, state, options); + return state; } - return matcher; -}; + /** + * Tokenize input until we reach end-of-string + */ -/** - * Test `input` with the given `regex`. This is used by the main - * `picomatch()` function to test the input string. 
- * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.test(input, regex[, options]); - * - * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); - * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } - * ``` - * @param {String} `input` String to test. - * @param {RegExp} `regex` - * @return {Object} Returns an object with matching info. - * @api public - */ + while (!eos()) { + value = advance(); -picomatch.test = (input, regex, options, { glob, posix } = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected input to be a string'); - } + if (value === '\u0000') { + continue; + } - if (input === '') { - return { isMatch: false, output: '' }; - } + /** + * Escaped characters + */ - const opts = options || {}; - const format = opts.format || (posix ? utils.toPosixSlashes : null); - let match = input === glob; - let output = (match && format) ? format(input) : input; + if (value === '\\') { + const next = peek(); - if (match === false) { - output = format ? format(input) : input; - match = output === glob; - } + if (next === '/' && opts.bash !== true) { + continue; + } - if (match === false || opts.capture === true) { - if (opts.matchBase === true || opts.basename === true) { - match = picomatch.matchBase(input, regex, options, posix); - } else { - match = regex.exec(output); - } - } + if (next === '.' || next === ';') { + continue; + } - return { isMatch: Boolean(match), match, output }; -}; + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } -/** - * Match the basename of a filepath. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.matchBase(input, glob[, options]); - * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true - * ``` - * @param {String} `input` String to test. - * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). - * @return {Boolean} - * @api public - */ + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; -picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { - const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); - return regex.test(path.basename(input)); -}; + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.isMatch(string, patterns[, options]); - * - * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String|Array} str The string to test. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} [options] See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } -picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } -/** - * Parse a glob pattern to create the source string for a regular - * expression. 
- * - * ```js - * const picomatch = require('picomatch'); - * const result = picomatch.parse(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as a regex source string. - * @api public - */ + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ -picomatch.parse = (pattern, options) => { - if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); - return parse(pattern, { ...options, fastpaths: false }); -}; + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; -/** - * Scan a glob pattern to separate the pattern into segments. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.scan(input[, options]); - * - * const result = picomatch.scan('!./foo/*.js'); - * console.log(result); - * { prefix: '!./', - * input: '!./foo/*.js', - * start: 3, - * base: 'foo', - * glob: '*.js', - * isBrace: false, - * isBracket: false, - * isGlob: true, - * isExtglob: false, - * isGlobstar: false, - * negated: true } - * ``` - * @param {String} `input` Glob pattern to scan. - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -picomatch.scan = (input, options) => scan(input, options); - -/** - * Compile a regular expression from the `state` object returned by the - * [parse()](#parse) method. - * - * @param {Object} `state` - * @param {Object} `options` - * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. - * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. - * @return {RegExp} - * @api public - */ - -picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { - if (returnOutput === true) { - return state.output; - } + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); - const opts = options || {}; - const prepend = opts.contains ? '' : '^'; - const append = opts.contains ? '' : '$'; + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } - let source = `${prepend}(?:${state.output})${append}`; - if (state && state.negated === true) { - source = `^(?!${source}).*$`; - } + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } - const regex = picomatch.toRegex(source, options); - if (returnState === true) { - regex.state = state; - } + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } - return regex; -}; + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } -/** - * Create a regular expression from a parsed glob pattern. 
- * - * ```js - * const picomatch = require('picomatch'); - * const state = picomatch.parse('*.js'); - * // picomatch.compileRe(state[, options]); - * - * console.log(picomatch.compileRe(state)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `state` The object returned from the `.parse` method. - * @param {Object} `options` - * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. - * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ + prev.value += value; + append({ value }); + continue; + } -picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { - if (!input || typeof input !== 'string') { - throw new TypeError('Expected a non-empty string'); - } + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ - let parsed = { negated: false, fastpaths: true }; + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } - if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { - parsed.output = parse.fastpaths(input, options); - } + /** + * Double quotes + */ - if (!parsed.output) { - parsed = parse(input, options); - } + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } - return picomatch.compileRe(parsed, options, returnOutput, returnState); -}; + /** + * Parentheses + */ -/** - * Create a regular expression from the given regex source string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.toRegex(source[, options]); - * - * const { output } = picomatch.parse('*.js'); - * console.log(picomatch.toRegex(output)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `source` Regular expression source string. - * @param {Object} `options` - * @return {RegExp} - * @api public - */ + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } -picomatch.toRegex = (source, options) => { - try { - const opts = options || {}; - return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); - } catch (err) { - if (options && options.debug === true) throw err; - return /$^/; - } -}; + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } -/** - * Picomatch constants. - * @return {Object} - */ + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } -picomatch.constants = constants; + push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); + decrement('parens'); + continue; + } -/** - * Expose "picomatch" - */ + /** + * Square brackets + */ -module.exports = picomatch; + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + value = `\\${value}`; + } else { + increment('brackets'); + } -/***/ }), + push({ type: 'bracket', value }); + continue; + } -/***/ 2429: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } -"use strict"; + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + push({ type: 'text', value, output: `\\${value}` }); + continue; + } -const utils = __nccwpck_require__(479); -const { - CHAR_ASTERISK, /* * */ - CHAR_AT, /* @ */ - CHAR_BACKWARD_SLASH, /* \ */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_EXCLAMATION_MARK, /* ! */ - CHAR_FORWARD_SLASH, /* / */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_PLUS, /* + */ - CHAR_QUESTION_MARK, /* ? */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_RIGHT_SQUARE_BRACKET /* ] */ -} = __nccwpck_require__(6099); + decrement('brackets'); -const isPathSeparator = code => { - return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; -}; + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } -const depth = token => { - if (token.isPrefix !== true) { - token.depth = token.isGlobstar ? Infinity : 1; - } -}; + prev.value += value; + append({ value }); -/** - * Quickly scans a glob pattern and returns an object with a handful of - * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), - * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not - * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). - * - * ```js - * const pm = require('picomatch'); - * console.log(pm.scan('foo/bar/*.js')); - * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {Object} Returns an object with tokens and regex source string. 
- * @api public - */ + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } -const scan = (input, options) => { - const opts = options || {}; + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); - const length = input.length - 1; - const scanToEnd = opts.parts === true || opts.scanToEnd === true; - const slashes = []; - const tokens = []; - const parts = []; + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } - let str = input; - let index = -1; - let start = 0; - let lastIndex = 0; - let isBrace = false; - let isBracket = false; - let isGlob = false; - let isExtglob = false; - let isGlobstar = false; - let braceEscaped = false; - let backslashes = false; - let negated = false; - let negatedExtglob = false; - let finished = false; - let braces = 0; - let prev; - let code; - let token = { value: '', depth: 0, isGlob: false }; + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } - const eos = () => index >= length; - const peek = () => str.charCodeAt(index + 1); - const advance = () => { - prev = code; - return str.charCodeAt(++index); - }; + /** + * Braces + */ - while (index < length) { - code = advance(); - let next; + if (value === '{' && opts.nobrace !== true) { + increment('braces'); - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; - if (code === CHAR_LEFT_CURLY_BRACE) { - braceEscaped = true; - } + braces.push(open); + push(open); continue; } - if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { - braces++; + if (value === '}') { + const brace = braces[braces.length - 1]; - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } - if (code === CHAR_LEFT_CURLY_BRACE) { - braces++; - continue; - } + let output = ')'; - if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; - if (scanToEnd === true) { - continue; + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); } - - break; } - if (braceEscaped !== true && code === CHAR_COMMA) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } + output = expandRange(range, opts); + state.backtrack = true; + } - break; + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output 
+= (t.output || t.value); } + } - if (code === CHAR_RIGHT_CURLY_BRACE) { - braces--; + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } - if (braces === 0) { - braceEscaped = false; - isBrace = token.isBrace = true; - finished = true; - break; - } - } + /** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; } + push({ type: 'text', value }); + continue; + } - if (scanToEnd === true) { - continue; + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; } - break; + push({ type: 'comma', value, output }); + continue; } - if (code === CHAR_FORWARD_SLASH) { - slashes.push(index); - tokens.push(token); - token = { value: '', depth: 0, isGlob: false }; + /** + * Slashes + */ - if (finished === true) continue; - if (prev === CHAR_DOT && index === (start + 1)) { - start += 2; + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token continue; } - lastIndex = index + 1; + push({ type: 'slash', value, output: SLASH_LITERAL }); continue; } - if (opts.noext !== true) { - const isExtglobChar = code === CHAR_PLUS - || code === CHAR_AT - || code === CHAR_ASTERISK - || code === CHAR_QUESTION_MARK - || code === CHAR_EXCLAMATION_MARK; - - if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - isExtglob = token.isExtglob = true; - finished = true; - if (code === CHAR_EXCLAMATION_MARK && index === start) { - negatedExtglob = true; - } - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } + /** + * Dots + */ - if (code === CHAR_RIGHT_PARENTHESES) { - isGlob = token.isGlob = true; - finished = true; - break; - } - } - continue; - } - break; + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; } - } - - if (code === CHAR_ASTERISK) { - if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; - isGlob = token.isGlob = true; - finished = true; - if (scanToEnd === true) { + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); continue; } - break; + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; } - if (code === CHAR_QUESTION_MARK) { - isGlob = token.isGlob = true; - finished = true; + /** + * Question marks + */ - if (scanToEnd === true) { + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); continue; } - break; - } - if (code === CHAR_LEFT_SQUARE_BRACKET) { - while (eos() !== true && (next = advance())) { - if 
(next === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); } - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - isBracket = token.isBracket = true; - isGlob = token.isGlob = true; - finished = true; - break; + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; } - } - if (scanToEnd === true) { + push({ type: 'text', value, output }); continue; } - break; - } + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } - if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { - negated = token.negated = true; - start++; + push({ type: 'qmark', value, output: QMARK }); continue; } - if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_LEFT_PARENTHESES) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } + /** + * Exclamation + */ - if (code === CHAR_RIGHT_PARENTHESES) { - finished = true; - break; - } + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); continue; } - break; } - if (isGlob === true) { - finished = true; + /** + * Plus + */ - if (scanToEnd === true) { + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); continue; } - break; - } - } + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } - if (opts.noext === true) { - isExtglob = false; - isGlob = false; - } + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } - let base = str; - let prefix = ''; - let glob = ''; + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } - if (start > 0) { - prefix = str.slice(0, start); - str = str.slice(start); - lastIndex -= start; - } + /** + * Plain text + */ - if (base && isGlob === true && lastIndex > 0) { - base = str.slice(0, lastIndex); - glob = str.slice(lastIndex); - } else if (isGlob === true) { - base = ''; - glob = str; - } else { - base = str; - } + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } - if (base && base !== '' && base !== '/' && base !== str) { - if (isPathSeparator(base.charCodeAt(base.length - 1))) { - base = base.slice(0, -1); + push({ type: 'text', value }); + continue; } - } - if (opts.unescape === true) { - if (glob) glob = utils.removeBackslashes(glob); + /** + * Plain text + */ - if (base && backslashes === true) { - base = utils.removeBackslashes(base); - } - } + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } - const state = { - prefix, - input, - start, - base, - glob, - isBrace, - isBracket, - isGlob, - isExtglob, - isGlobstar, - negated, - 
negatedExtglob - }; + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } - if (opts.tokens === true) { - state.maxDepth = 0; - if (!isPathSeparator(code)) { - tokens.push(token); + push({ type: 'text', value }); + continue; } - state.tokens = tokens; - } - if (opts.parts === true || opts.tokens === true) { - let prevIndex; + /** + * Stars + */ - for (let idx = 0; idx < slashes.length; idx++) { - const n = prevIndex ? prevIndex + 1 : start; - const i = slashes[idx]; - const value = input.slice(n, i); - if (opts.tokens) { - if (idx === 0 && start !== 0) { - tokens[idx].isPrefix = true; - tokens[idx].value = prefix; - } else { - tokens[idx].value = value; - } - depth(tokens[idx]); - state.maxDepth += tokens[idx].depth; - } - if (idx !== 0 || value !== '') { - parts.push(value); - } - prevIndex = i; + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; } - if (prevIndex && prevIndex + 1 < input.length) { - const value = input.slice(prevIndex + 1); - parts.push(value); + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } - if (opts.tokens) { - tokens[tokens.length - 1].value = value; - depth(tokens[tokens.length - 1]); - state.maxDepth += tokens[tokens.length - 1].depth; + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; } - } - state.slashes = slashes; - state.parts = parts; - } + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); - return state; -}; + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } -module.exports = scan; + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } -/***/ }), + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } -/***/ 479: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; -"use strict"; + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; -const path = __nccwpck_require__(1017); -const win32 = process.platform === 'win32'; -const { - REGEX_BACKSLASH, - REGEX_REMOVE_BACKSLASH, - REGEX_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_GLOBAL -} = __nccwpck_require__(6099); + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; -exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); -exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); -exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); -exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); -exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; -exports.removeBackslashes = str => { - return str.replace(REGEX_REMOVE_BACKSLASH, match => { - return match === '\\' ? '' : match; - }); -}; + state.output += prior.output + prev.output; + state.globstar = true; -exports.supportsLookbehinds = () => { - const segs = process.version.slice(1).split('.').map(Number); - if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { - return true; - } - return false; -}; + consume(value + advance()); -exports.isWindows = options => { - if (options && typeof options.windows === 'boolean') { - return options.windows; - } - return win32 === true || path.sep === '\\'; -}; + push({ type: 'slash', value: '/', output: '' }); + continue; + } -exports.escapeLast = (input, char, lastIdx) => { - const idx = input.lastIndexOf(char, lastIdx); - if (idx === -1) return input; - if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); - return `${input.slice(0, idx)}\\${input.slice(idx)}`; -}; + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } -exports.removePrefix = (input, state = {}) => { - let output = input; - if (output.startsWith('./')) { - output = output.slice(2); - state.prefix = './'; - } - return output; -}; + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); -exports.wrapOutput = (input, state = {}, options = {}) => { - const prepend = options.contains ? '' : '^'; - const append = options.contains ? 
'' : '$'; + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; - let output = `${prepend}(?:${input})${append}`; - if (state.negated === true) { - output = `(?:^(?!${output}).*$)`; - } - return output; -}; + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + const token = { type: 'star', value, output: star }; -/***/ }), + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } -/***/ 8341: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } -var once = __nccwpck_require__(1223) -var eos = __nccwpck_require__(1205) -var fs = __nccwpck_require__(7147) // we only need fs to get the ReadStream and WriteStream prototypes + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; -var noop = function () {} -var ancient = /^v?\.0/.test(process.version) + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; -var isFn = function (fn) { - return typeof fn === 'function' -} + } else { + state.output += nodot; + prev.output += nodot; + } -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) -} + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) -} + push(token); + } -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } - var closed = false - stream.on('close', function () { - closed = true - }) + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; - if (isFn(stream.destroy)) return 
stream.destroy() + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; - callback(err || new Error('stream was destroyed')) + if (token.suffix) { + state.output += token.suffix; + } + } } -} -var call = function (fn) { - fn() -} + return state; +}; -var pipe = function (from, to) { - return from.pipe(to) -} +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') + input = REPLACEMENTS[input] || input; - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) - if (reading) return - destroys.forEach(call) - callback(error) - }) - }) + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(opts.windows); - return streams.reduce(pipe) -} + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; -module.exports = pump + if (opts.capture) { + star = `(${star})`; + } + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; -/***/ }), - -/***/ 9795: -/***/ ((module) => { - -/*! queue-microtask. MIT License. Feross Aboukhadijeh */ -let promise - -module.exports = typeof queueMicrotask === 'function' - ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) - // reuse resolved promise, and allocate it lazily - : cb => (promise || (promise = Promise.resolve())) - .then(cb) - .catch(err => setTimeout(() => { throw err }, 0)) - - -/***/ }), - -/***/ 9273: -/***/ ((module) => { - -"use strict"; - + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; -class QuickLRU { - constructor(options = {}) { - if (!(options.maxSize && options.maxSize > 0)) { - throw new TypeError('`maxSize` must be a number greater than 0'); - } + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; - this.maxSize = options.maxSize; - this.onEviction = options.onEviction; - this.cache = new Map(); - this.oldCache = new Map(); - this._size = 0; - } + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - _set(key, value) { - this.cache.set(key, value); - this._size++; + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; - if (this._size >= this.maxSize) { - this._size = 0; + case '**': + return nodot + globstar(opts); - if (typeof this.onEviction === 'function') { - for (const [key, value] of this.oldCache.entries()) { - this.onEviction(key, value); - } - } + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; - this.oldCache = this.cache; - this.cache = new Map(); - } - } + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - get(key) { - if (this.cache.has(key)) { - return this.cache.get(key); - } + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; - if (this.oldCache.has(key)) { - const value = this.oldCache.get(key); - this.oldCache.delete(key); - this._set(key, value); - return value; - } - } + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; - set(key, value) { - if (this.cache.has(key)) { - this.cache.set(key, value); - } else { - this._set(key, value); - } + const source = create(match[1]); + if (!source) return; - return this; - } + return source + DOT_LITERAL + match[2]; + } + } + }; - has(key) { - return this.cache.has(key) || this.oldCache.has(key); - } + const output = utils.removePrefix(input, state); + let source = create(output); - peek(key) { - if (this.cache.has(key)) { - return this.cache.get(key); - } + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } - if (this.oldCache.has(key)) { - return this.oldCache.get(key); - } - } + return source; +}; - delete(key) { - const deleted = this.cache.delete(key); - if (deleted) { - this._size--; - } +module.exports = parse; - return this.oldCache.delete(key) || deleted; - } - clear() { - this.cache.clear(); - this.oldCache.clear(); - this._size = 0; - } +/***/ }), - * keys() { - for (const [key] of this) { - yield key; - } - } +/***/ 3322: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - * values() { - for (const [, value] of this) { - yield value; - } - } +"use strict"; - * [Symbol.iterator]() { - for (const item of this.cache) { - yield item; - } - for (const item of this.oldCache) { - const [key] = item; - if (!this.cache.has(key)) { - yield item; - } - } - } +const scan = __nccwpck_require__(2429); +const parse = __nccwpck_require__(2139); +const utils = __nccwpck_require__(479); +const constants = __nccwpck_require__(6099); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); - get size() { - let oldCacheSize = 0; - for 
(const key of this.oldCache.keys()) { - if (!this.cache.has(key)) { - oldCacheSize++; - } - } +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. + * @api public + */ - return Math.min(this._size + oldCacheSize, this.maxSize); - } -} +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } -module.exports = QuickLRU; + const isState = isObject(glob) && glob.tokens && glob.input; + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } -/***/ }), + const opts = options || {}; + const posix = opts.windows; + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); -/***/ 6624: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + const state = regex.state; + delete regex.state; -"use strict"; + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } -const tls = __nccwpck_require__(4404); + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; -module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => { - let timeout = false; + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } - let socket; + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } - const callback = async () => { - await socketPromise; + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } - socket.off('timeout', onTimeout); - socket.off('error', reject); + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; - if (options.resolveSocket) { - resolve({alpnProtocol: socket.alpnProtocol, socket, timeout}); + if (returnState) { + matcher.state = state; + } - if (timeout) { - await Promise.resolve(); - socket.emit('timeout'); - } - } else { - socket.destroy(); - resolve({alpnProtocol: socket.alpnProtocol, timeout}); - } - }; + return matcher; +}; - const onTimeout = async () => { - timeout = true; - callback(); - }; +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. 
+ * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ - const socketPromise = (async () => { - try { - socket = await connect(options, callback); +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } - socket.on('error', reject); - socket.once('timeout', onTimeout); - } catch (error) { - reject(error); - } - })(); -}); + if (input === '') { + return { isMatch: false, output: '' }; + } + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; -/***/ }), + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } -/***/ 9004: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } -"use strict"; + return { isMatch: Boolean(match), match, output }; +}; +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). + * @return {Boolean} + * @api public + */ -const Readable = (__nccwpck_require__(2781).Readable); -const lowercaseKeys = __nccwpck_require__(9662); +picomatch.matchBase = (input, glob, options) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(utils.basename(input)); +}; -class Response extends Readable { - constructor(statusCode, headers, body, url) { - if (typeof statusCode !== 'number') { - throw new TypeError('Argument `statusCode` should be a number'); - } - if (typeof headers !== 'object') { - throw new TypeError('Argument `headers` should be an object'); - } - if (!(body instanceof Buffer)) { - throw new TypeError('Argument `body` should be a buffer'); - } - if (typeof url !== 'string') { - throw new TypeError('Argument `url` should be a string'); - } +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). 
+ * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ - super(); - this.statusCode = statusCode; - this.headers = lowercaseKeys(headers); - this.body = body; - this.url = url; - } +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - _read() { - this.push(this.body); - this.push(null); - } -} +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ -module.exports = Response; +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ -/***/ }), +picomatch.scan = (input, options) => scan(input, options); -/***/ 2113: -/***/ ((module) => { +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ -"use strict"; +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; -function reusify (Constructor) { - var head = new Constructor() - var tail = head + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } - function get () { - var current = head + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } - if (current.next) { - head = current.next - } else { - head = new Constructor() - tail = head - } + return regex; +}; - current.next = null +/** + * Create a regular expression from a parsed glob pattern. + * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. 
This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ - return current +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); } - function release (obj) { - tail.next = obj - tail = obj - } + let parsed = { negated: false, fastpaths: true }; - return { - get: get, - release: release + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); } -} - -module.exports = reusify + if (!parsed.output) { + parsed = parse(input, options); + } -/***/ }), + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; -/***/ 5288: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*! run-parallel. MIT License. Feross Aboukhadijeh */ -module.exports = runParallel - -const queueMicrotask = __nccwpck_require__(9795) - -function runParallel (tasks, cb) { - let results, pending, keys - let isSync = true +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ - if (Array.isArray(tasks)) { - results = [] - pending = tasks.length - } else { - keys = Object.keys(tasks) - results = {} - pending = keys.length +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; } +}; - function done (err) { - function end () { - if (cb) cb(err, results) - cb = null - } - if (isSync) queueMicrotask(end) - else end() - } +/** + * Picomatch constants. + * @return {Object} + */ - function each (i, err, result) { - results[i] = result - if (--pending === 0 || err) { - done(err) - } - } +picomatch.constants = constants; - if (!pending) { - // empty - done(null) - } else if (keys) { - // object - keys.forEach(function (key) { - tasks[key](function (err, result) { each(key, err, result) }) - }) - } else { - // array - tasks.forEach(function (task, i) { - task(function (err, result) { each(i, err, result) }) - }) - } +/** + * Expose "picomatch" + */ - isSync = false -} +module.exports = picomatch; /***/ }), -/***/ 2043: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -;(function (sax) { // wrapper for non-node envs - sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } - sax.SAXParser = SAXParser - sax.SAXStream = SAXStream - sax.createStream = createStream +/***/ 2429: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. - // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), - // since that's the earliest that a buffer overrun could occur. 
This way, checks are - // as rare as required, but as often as necessary to ensure never crossing this bound. - // Furthermore, buffers are only tested at most once per write(), so passing a very - // large string into write() might have undesirable effects, but this is manageable by - // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme - // edge case, result in creating at most one complete copy of the string passed in. - // Set to Infinity to have unlimited buffers. - sax.MAX_BUFFER_LENGTH = 64 * 1024 +"use strict"; - var buffers = [ - 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', - 'procInstName', 'procInstBody', 'entity', 'attribName', - 'attribValue', 'cdata', 'script' - ] - sax.EVENTS = [ - 'text', - 'processinginstruction', - 'sgmldeclaration', - 'doctype', - 'comment', - 'opentagstart', - 'attribute', - 'opentag', - 'closetag', - 'opencdata', - 'cdata', - 'closecdata', - 'error', - 'end', - 'ready', - 'script', - 'opennamespace', - 'closenamespace' - ] +const utils = __nccwpck_require__(479); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = __nccwpck_require__(6099); - function SAXParser (strict, opt) { - if (!(this instanceof SAXParser)) { - return new SAXParser(strict, opt) - } +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; - var parser = this - clearBuffers(parser) - parser.q = parser.c = '' - parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH - parser.opt = opt || {} - parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags - parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' - parser.tags = [] - parser.closed = parser.closedRoot = parser.sawRoot = false - parser.tag = parser.error = null - parser.strict = !!strict - parser.noscript = !!(strict || parser.opt.noscript) - parser.state = S.BEGIN - parser.strictEntities = parser.opt.strictEntities - parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) - parser.attribList = [] +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; - // namespaces form a prototype chain. - // it always points at the current tag, - // which protos to its parent tag. - if (parser.opt.xmlns) { - parser.ns = Object.create(rootNS) - } +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). + * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. 
+ * @api public + */ - // mostly just for error reporting - parser.trackPosition = parser.opt.position !== false - if (parser.trackPosition) { - parser.position = parser.line = parser.column = 0 - } - emit(parser, 'onready') - } +const scan = (input, options) => { + const opts = options || {}; - if (!Object.create) { - Object.create = function (o) { - function F () {} - F.prototype = o - var newf = new F() - return newf - } - } + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; - if (!Object.keys) { - Object.keys = function (o) { - var a = [] - for (var i in o) if (o.hasOwnProperty(i)) a.push(i) - return a - } - } + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; - function checkBufferLength (parser) { - var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) - var maxActual = 0 - for (var i = 0, l = buffers.length; i < l; i++) { - var len = parser[buffers[i]].length - if (len > maxAllowed) { - // Text/cdata nodes can get big, and since they're buffered, - // we can get here under normal conditions. - // Avoid issues by emitting the text node now, - // so at least it won't get any bigger. - switch (buffers[i]) { - case 'textNode': - closeText(parser) - break + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; - case 'cdata': - emitNode(parser, 'oncdata', parser.cdata) - parser.cdata = '' - break + while (index < length) { + code = advance(); + let next; - case 'script': - emitNode(parser, 'onscript', parser.script) - parser.script = '' - break + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); - default: - error(parser, 'Max buffer length exceeded: ' + buffers[i]) - } + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; } - maxActual = Math.max(maxActual, len) + continue; } - // schedule the next check for the earliest possible buffer overrun. 
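A minimal sketch of the scan() API implemented in the added lines above, assuming the standalone picomatch package rather than the bundled module id; the pattern is illustrative and not part of this bundle.

// Sketch: split a pattern into its literal base and its glob part.
const picomatch = require('picomatch');

const { prefix, base, glob, isGlob, negated } = picomatch.scan('./src/**/*.ts');
console.log(prefix);  //=> './'
console.log(base);    //=> 'src'      (the leading non-glob part)
console.log(glob);    //=> '**/*.ts'
console.log(isGlob);  //=> true
console.log(negated); //=> false
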
- var m = sax.MAX_BUFFER_LENGTH - maxActual - parser.bufferCheckPosition = m + parser.position - } - function clearBuffers (parser) { - for (var i = 0, l = buffers.length; i < l; i++) { - parser[buffers[i]] = '' - } - } + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; - function flushBuffers (parser) { - closeText(parser) - if (parser.cdata !== '') { - emitNode(parser, 'oncdata', parser.cdata) - parser.cdata = '' - } - if (parser.script !== '') { - emitNode(parser, 'onscript', parser.script) - parser.script = '' - } - } + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } - SAXParser.prototype = { - end: function () { end(this) }, - write: write, - resume: function () { this.error = null; return this }, - close: function () { return this.write(null) }, - flush: function () { flushBuffers(this) } - } + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } - var Stream - try { - Stream = (__nccwpck_require__(2781).Stream) - } catch (ex) { - Stream = function () {} - } + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; - var streamWraps = sax.EVENTS.filter(function (ev) { - return ev !== 'error' && ev !== 'end' - }) + if (scanToEnd === true) { + continue; + } - function createStream (strict, opt) { - return new SAXStream(strict, opt) - } + break; + } - function SAXStream (strict, opt) { - if (!(this instanceof SAXStream)) { - return new SAXStream(strict, opt) - } + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; - Stream.apply(this) + if (scanToEnd === true) { + continue; + } - this._parser = new SAXParser(strict, opt) - this.writable = true - this.readable = true + break; + } - var me = this + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; - this._parser.onend = function () { - me.emit('end') - } + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } - this._parser.onerror = function (er) { - me.emit('error', er) + if (scanToEnd === true) { + continue; + } - // if didn't throw, then means error was handled. - // go ahead and clear error, so we can write again. 
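For context on the matcher API whose JSDoc and implementation are added earlier in this hunk (picomatch() and picomatch.makeRe()), a minimal sketch follows; it assumes the standalone picomatch package, and the file names are illustrative.

// Sketch: compile a glob once, then reuse the matcher or the raw RegExp.
const picomatch = require('picomatch');

const isMatch = picomatch('*.js');      // returns a matcher function
console.log(isMatch('index.js'));       //=> true
console.log(isMatch('src/index.js'));   //=> false ('*' does not cross '/')

const re = picomatch.makeRe('*.js');    // plain RegExp for the same pattern
console.log(re.test('index.test.js'));  //=> true
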
- me._parser.error = null + break; } - this._decoder = null + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; - streamWraps.forEach(function (ev) { - Object.defineProperty(me, 'on' + ev, { - get: function () { - return me._parser['on' + ev] - }, - set: function (h) { - if (!h) { - me.removeAllListeners(ev) - me._parser['on' + ev] = h - return h - } - me.on(ev, h) - }, - enumerable: true, - configurable: false - }) - }) - } + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } - SAXStream.prototype = Object.create(Stream.prototype, { - constructor: { - value: SAXStream + lastIndex = index + 1; + continue; } - }) - SAXStream.prototype.write = function (data) { - if (typeof Buffer === 'function' && - typeof Buffer.isBuffer === 'function' && - Buffer.isBuffer(data)) { - if (!this._decoder) { - var SD = (__nccwpck_require__(1576).StringDecoder) - this._decoder = new SD('utf8') + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; } - data = this._decoder.write(data) } - this._parser.write(data.toString()) - this.emit('data', data) - return true - } + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; - SAXStream.prototype.end = function (chunk) { - if (chunk && chunk.length) { - this.write(chunk) + if (scanToEnd === true) { + continue; + } + break; } - this._parser.end() - return true - } - SAXStream.prototype.on = function (ev, handler) { - var me = this - if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { - me._parser['on' + ev] = function () { - var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) - args.splice(0, 0, ev) - me.emit.apply(me, args) + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; } + break; } - return Stream.prototype.on.call(me, ev, handler) - } + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } - // this really needs to be replaced with character classes. - // XML allows all manner of ridiculous numbers and digits. 
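The matcher added in this hunk also honors ignore patterns and the onResult/onMatch/onIgnore hooks visible in its implementation; a hedged sketch with illustrative patterns and paths, again assuming the standalone picomatch package.

// Sketch: combine a glob with ignore patterns and an onResult hook.
const picomatch = require('picomatch');

const isMatch = picomatch('**/*.js', {
  ignore: ['**/node_modules/**'],      // compiled into a second, internal matcher
  onResult: result => {
    // called for every tested input, before ignore filtering is applied
    console.log(result.input, result.isMatch);
  }
});

console.log(isMatch('src/app.js'));           //=> true
console.log(isMatch('node_modules/a/b.js'));  //=> false (filtered by `ignore`)
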
- var CDATA = '[CDATA[' - var DOCTYPE = 'DOCTYPE' - var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' - var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' - var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } - // http://www.w3.org/TR/REC-xml/#NT-NameStartChar - // This implementation works on strings, a single character at a time + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; + + +/***/ }), + +/***/ 479: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = __nccwpck_require__(6099); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.supportsLookbehinds = () => { + if (typeof process !== 'undefined') { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + } + return false; +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? '' : '$'; + + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; + +exports.basename = (path, { windows } = {}) => { + const segs = path.split(windows ? 
/[\\/]/ : '/'); + const last = segs[segs.length - 1]; + + if (last === '') { + return segs[segs.length - 2]; + } + + return last; +}; + + +/***/ }), + +/***/ 8341: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(1223) +var eos = __nccwpck_require__(1205) +var fs = __nccwpck_require__(7147) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump + + +/***/ }), + +/***/ 9795: +/***/ ((module) => { + +/*! queue-microtask. MIT License. Feross Aboukhadijeh */ +let promise + +module.exports = typeof queueMicrotask === 'function' + ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) + // reuse resolved promise, and allocate it lazily + : cb => (promise || (promise = Promise.resolve())) + .then(cb) + .catch(err => setTimeout(() => { throw err }, 0)) + + +/***/ }), + +/***/ 9273: +/***/ ((module) => { + +"use strict"; + + +class QuickLRU { + constructor(options = {}) { + if (!(options.maxSize && options.maxSize > 0)) { + throw new TypeError('`maxSize` must be a number greater than 0'); + } + + this.maxSize = options.maxSize; + this.onEviction = options.onEviction; + this.cache = new Map(); + this.oldCache = new Map(); + this._size = 0; + } + + _set(key, value) { + this.cache.set(key, value); + this._size++; + + if (this._size >= this.maxSize) { + this._size = 0; + + if (typeof this.onEviction === 'function') { + for (const [key, value] of this.oldCache.entries()) { + this.onEviction(key, value); + } + } + + this.oldCache = this.cache; + this.cache = new Map(); + } + } + + get(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + const value = this.oldCache.get(key); + this.oldCache.delete(key); + this._set(key, value); + return value; + } + } + + set(key, value) { + if (this.cache.has(key)) { + this.cache.set(key, value); + } else { + this._set(key, value); + } + + return this; + } + + has(key) { + return this.cache.has(key) || this.oldCache.has(key); + } + + peek(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + return this.oldCache.get(key); + } + } + + delete(key) { + const deleted = this.cache.delete(key); + if (deleted) { + this._size--; + } + + return this.oldCache.delete(key) || deleted; + } + + clear() { + this.cache.clear(); + this.oldCache.clear(); + this._size = 0; + } + + * keys() { + for (const [key] of this) { + yield key; + } + } + + * values() { + for (const [, value] of this) { + yield value; + } + } + + * [Symbol.iterator]() { + for (const item of this.cache) { + yield item; + } + + for (const item of this.oldCache) { + const [key] = item; + if (!this.cache.has(key)) { + yield item; + } + } + } + + get size() { + let oldCacheSize = 0; + for (const key of this.oldCache.keys()) { + if (!this.cache.has(key)) { + oldCacheSize++; + } + } + + return Math.min(this._size + oldCacheSize, this.maxSize); + } +} + +module.exports = QuickLRU; + + +/***/ }), + +/***/ 6624: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const tls = __nccwpck_require__(4404); + +module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => { + let timeout = false; + + let socket; + + const callback = async () => { + await socketPromise; + + socket.off('timeout', onTimeout); + socket.off('error', reject); + + if (options.resolveSocket) { + resolve({alpnProtocol: socket.alpnProtocol, socket, timeout}); + + if (timeout) { + await Promise.resolve(); + socket.emit('timeout'); + } + } else { + socket.destroy(); + resolve({alpnProtocol: socket.alpnProtocol, timeout}); + } + }; + + const onTimeout = async () => { + timeout = true; + callback(); + }; + + const socketPromise = (async () => { + try { + socket = await connect(options, callback); + + socket.on('error', reject); + socket.once('timeout', onTimeout); + } catch (error) { + reject(error); + } + })(); +}); + + +/***/ }), + +/***/ 9004: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Readable = (__nccwpck_require__(2781).Readable); +const lowercaseKeys = __nccwpck_require__(9662); + +class 
Response extends Readable { + constructor(statusCode, headers, body, url) { + if (typeof statusCode !== 'number') { + throw new TypeError('Argument `statusCode` should be a number'); + } + if (typeof headers !== 'object') { + throw new TypeError('Argument `headers` should be an object'); + } + if (!(body instanceof Buffer)) { + throw new TypeError('Argument `body` should be a buffer'); + } + if (typeof url !== 'string') { + throw new TypeError('Argument `url` should be a string'); + } + + super(); + this.statusCode = statusCode; + this.headers = lowercaseKeys(headers); + this.body = body; + this.url = url; + } + + _read() { + this.push(this.body); + this.push(null); + } +} + +module.exports = Response; + + +/***/ }), + +/***/ 2113: +/***/ ((module) => { + +"use strict"; + + +function reusify (Constructor) { + var head = new Constructor() + var tail = head + + function get () { + var current = head + + if (current.next) { + head = current.next + } else { + head = new Constructor() + tail = head + } + + current.next = null + + return current + } + + function release (obj) { + tail.next = obj + tail = obj + } + + return { + get: get, + release: release + } +} + +module.exports = reusify + + +/***/ }), + +/***/ 5288: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! run-parallel. MIT License. Feross Aboukhadijeh */ +module.exports = runParallel + +const queueMicrotask = __nccwpck_require__(9795) + +function runParallel (tasks, cb) { + let results, pending, keys + let isSync = true + + if (Array.isArray(tasks)) { + results = [] + pending = tasks.length + } else { + keys = Object.keys(tasks) + results = {} + pending = keys.length + } + + function done (err) { + function end () { + if (cb) cb(err, results) + cb = null + } + if (isSync) queueMicrotask(end) + else end() + } + + function each (i, err, result) { + results[i] = result + if (--pending === 0 || err) { + done(err) + } + } + + if (!pending) { + // empty + done(null) + } else if (keys) { + // object + keys.forEach(function (key) { + tasks[key](function (err, result) { each(key, err, result) }) + }) + } else { + // array + tasks.forEach(function (task, i) { + task(function (err, result) { each(i, err, result) }) + }) + } + + isSync = false +} + + +/***/ }), + +/***/ 2043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. 
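run-parallel, re-added in full just above, is a small callback-based scheduler; a minimal usage sketch with illustrative tasks, assuming the standalone run-parallel package rather than the bundled module id.

// Sketch: run independent callback-style tasks; results keep task order.
const runParallel = require('run-parallel');

runParallel([
  cb => setTimeout(() => cb(null, 'first'), 20),
  cb => setTimeout(() => cb(null, 'second'), 10)
], (err, results) => {
  // the second task finishes first, but results follow the task order
  console.log(err, results); //=> null [ 'first', 'second' ]
});
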
+ sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. 
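The sax parser re-bundled here is event driven; a brief hedged sketch of typical usage follows, assuming the standalone sax package. The event names come from sax.EVENTS above, while the exact payload shapes (for example node.name on opentag) follow sax's published API and are not shown in this hunk.

// Sketch: strict parser with a few event handlers (illustrative XML).
const sax = require('sax');

const parser = sax.parser(true /* strict */, { position: true });

parser.onopentag = node => console.log('open', node.name);
parser.ontext = text => console.log('text', text);
parser.onerror = err => { console.error(err); parser.resume(); };
parser.onend = () => console.log('done');

parser.write('<greeting>hello</greeting>');
parser.close(); // same as write(null), per SAXParser.prototype in this module
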
+ var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = (__nccwpck_require__(2781).Stream) + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = (__nccwpck_require__(1576).StringDecoder) + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time // as such, it cannot ever support astral-plane characters (10000-EFFFF) // without a significant breaking change to either this parser, or the // JavaScript language. 
Implementation of an emoji-capable xml parser // is left as an exercise for the reader. var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ - var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, //