module.exports = /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ /***/ 913: /***/ ((module) => { "use strict"; module.exports = JSON.parse("{\"_args\":[[\"@octokit/rest@16.43.2\",\"/home/betahuhn/programming/repo-file-sync-action\"]],\"_from\":\"@octokit/rest@16.43.2\",\"_id\":\"@octokit/rest@16.43.2\",\"_inBundle\":false,\"_integrity\":\"sha512-ngDBevLbBTFfrHZeiS7SAMAZ6ssuVmXuya+F/7RaVvlysgGa1JKJkKWY+jV6TCJYcW0OALfJ7nTIGXcBXzycfQ==\",\"_location\":\"/@actions/github/@octokit/rest\",\"_phantomChildren\":{},\"_requested\":{\"type\":\"version\",\"registry\":true,\"raw\":\"@octokit/rest@16.43.2\",\"name\":\"@octokit/rest\",\"escapedName\":\"@octokit%2frest\",\"scope\":\"@octokit\",\"rawSpec\":\"16.43.2\",\"saveSpec\":null,\"fetchSpec\":\"16.43.2\"},\"_requiredBy\":[\"/@actions/github\"],\"_resolved\":\"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.2.tgz\",\"_spec\":\"16.43.2\",\"_where\":\"/home/betahuhn/programming/repo-file-sync-action\",\"author\":{\"name\":\"Gregor Martynus\",\"url\":\"https://github.com/gr2m\"},\"bugs\":{\"url\":\"https://github.com/octokit/rest.js/issues\"},\"bundlesize\":[{\"path\":\"./dist/octokit-rest.min.js.gz\",\"maxSize\":\"33 kB\"}],\"contributors\":[{\"name\":\"Mike de Boer\",\"email\":\"info@mikedeboer.nl\"},{\"name\":\"Fabian Jakobs\",\"email\":\"fabian@c9.io\"},{\"name\":\"Joe Gallo\",\"email\":\"joe@brassafrax.com\"},{\"name\":\"Gregor Martynus\",\"url\":\"https://github.com/gr2m\"}],\"dependencies\":{\"@octokit/auth-token\":\"^2.4.0\",\"@octokit/plugin-paginate-rest\":\"^1.1.1\",\"@octokit/plugin-request-log\":\"^1.0.0\",\"@octokit/plugin-rest-endpoint-methods\":\"2.4.0\",\"@octokit/request\":\"^5.2.0\",\"@octokit/request-error\":\"^1.0.2\",\"atob-lite\":\"^2.0.0\",\"before-after-hook\":\"^2.0.0\",\"btoa-lite\":\"^1.0.0\",\"deprecation\":\"^2.0.0\",\"lodash.get\":\"^4.4.2\",\"lodash.set\":\"^4.3.2\",\"lodash.uniq\":\"^4.5.0\",\"octokit-pagination-methods\":\"^1.1.0\",\"once\":\"^1.4.0\",\"universal-user-agent\":\"^4.0.0\"},\"description\":\"GitHub REST API client for Node.js\",\"devDependencies\":{\"@gimenete/type-writer\":\"^0.1.3\",\"@octokit/auth\":\"^1.1.1\",\"@octokit/fixtures-server\":\"^5.0.6\",\"@octokit/graphql\":\"^4.2.0\",\"@types/node\":\"^13.1.0\",\"bundlesize\":\"^0.18.0\",\"chai\":\"^4.1.2\",\"compression-webpack-plugin\":\"^3.1.0\",\"cypress\":\"^4.0.0\",\"glob\":\"^7.1.2\",\"http-proxy-agent\":\"^4.0.0\",\"lodash.camelcase\":\"^4.3.0\",\"lodash.merge\":\"^4.6.1\",\"lodash.upperfirst\":\"^4.3.1\",\"lolex\":\"^6.0.0\",\"mkdirp\":\"^1.0.0\",\"mocha\":\"^7.0.1\",\"mustache\":\"^4.0.0\",\"nock\":\"^11.3.3\",\"npm-run-all\":\"^4.1.2\",\"nyc\":\"^15.0.0\",\"prettier\":\"^1.14.2\",\"proxy\":\"^1.0.0\",\"semantic-release\":\"^17.0.0\",\"sinon\":\"^8.0.0\",\"sinon-chai\":\"^3.0.0\",\"sort-keys\":\"^4.0.0\",\"string-to-arraybuffer\":\"^1.0.0\",\"string-to-jsdoc-comment\":\"^1.0.0\",\"typescript\":\"^3.3.1\",\"webpack\":\"^4.0.0\",\"webpack-bundle-analyzer\":\"^3.0.0\",\"webpack-cli\":\"^3.0.0\"},\"files\":[\"index.js\",\"index.d.ts\",\"lib\",\"plugins\"],\"homepage\":\"https://github.com/octokit/rest.js#readme\",\"keywords\":[\"octokit\",\"github\",\"rest\",\"api-client\"],\"license\":\"MIT\",\"name\":\"@octokit/rest\",\"nyc\":{\"ignore\":[\"test\"]},\"publishConfig\":{\"access\":\"public\"},\"release\":{\"publish\":[\"@semantic-release/npm\",{\"path\":\"@semantic-release/github\",\"assets\":[\"dist/*\",\"!dist/*.map.gz\"]}]},\"repository\":{\"type\":\"git\",\"url\":\"git+https://github.com/octokit/rest.js.git\"},\"scripts\":{\"build\":\"npm-run-all build:*\",\"build:browser\":\"npm-run-all build:browser:*\",\"build:browser:development\":\"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json\",\"build:browser:production\":\"webpack --mode production --entry . --plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map\",\"build:ts\":\"npm run -s update-endpoints:typescript\",\"coverage\":\"nyc report --reporter=html && open coverage/index.html\",\"generate-bundle-report\":\"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html\",\"lint\":\"prettier --check '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json\",\"lint:fix\":\"prettier --write '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json\",\"postvalidate:ts\":\"tsc --noEmit --target es6 test/typescript-validate.ts\",\"prebuild:browser\":\"mkdirp dist/\",\"pretest\":\"npm run -s lint\",\"prevalidate:ts\":\"npm run -s build:ts\",\"start-fixtures-server\":\"octokit-fixtures-server\",\"test\":\"nyc mocha test/mocha-node-setup.js \\\"test/*/**/*-test.js\\\"\",\"test:browser\":\"cypress run --browser chrome\",\"update-endpoints\":\"npm-run-all update-endpoints:*\",\"update-endpoints:fetch-json\":\"node scripts/update-endpoints/fetch-json\",\"update-endpoints:typescript\":\"node scripts/update-endpoints/typescript\",\"validate:ts\":\"tsc --target es6 --noImplicitAny index.d.ts\"},\"types\":\"index.d.ts\",\"version\":\"16.43.2\"}"); /***/ }), /***/ 7351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); const os = __importStar(__nccwpck_require__(2087)); const utils_1 = __nccwpck_require__(5278); /** * Commands * * Command Format: * ::name key=value,key=value::message * * Examples: * ::warning::This is the message * ::set-env name=MY_VAR::some value */ function issueCommand(command, properties, message) { const cmd = new Command(command, properties, message); process.stdout.write(cmd.toString() + os.EOL); } exports.issueCommand = issueCommand; function issue(name, message = '') { issueCommand(name, {}, message); } exports.issue = issue; const CMD_STRING = '::'; class Command { constructor(command, properties, message) { if (!command) { command = 'missing.command'; } this.command = command; this.properties = properties; this.message = message; } toString() { let cmdStr = CMD_STRING + this.command; if (this.properties && Object.keys(this.properties).length > 0) { cmdStr += ' '; let first = true; for (const key in this.properties) { if (this.properties.hasOwnProperty(key)) { const val = this.properties[key]; if (val) { if (first) { first = false; } else { cmdStr += ','; } cmdStr += `${key}=${escapeProperty(val)}`; } } } } cmdStr += `${CMD_STRING}${escapeData(this.message)}`; return cmdStr; } } function escapeData(s) { return utils_1.toCommandValue(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A'); } function escapeProperty(s) { return utils_1.toCommandValue(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A') .replace(/:/g, '%3A') .replace(/,/g, '%2C'); } //# sourceMappingURL=command.js.map /***/ }), /***/ 2186: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); const command_1 = __nccwpck_require__(7351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(2087)); const path = __importStar(__nccwpck_require__(5622)); /** * The code to exit an action */ var ExitCode; (function (ExitCode) { /** * A code indicating that the action was successful */ ExitCode[ExitCode["Success"] = 0] = "Success"; /** * A code indicating that the action was a failure */ ExitCode[ExitCode["Failure"] = 1] = "Failure"; })(ExitCode = exports.ExitCode || (exports.ExitCode = {})); //----------------------------------------------------------------------- // Variables //----------------------------------------------------------------------- /** * Sets env variable for this action and future actions in the job * @param name the name of the variable to set * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function exportVariable(name, val) { const convertedVal = utils_1.toCommandValue(val); process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { const delimiter = '_GitHubActionsFileCommandDelimeter_'; const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; file_command_1.issueCommand('ENV', commandValue); } else { command_1.issueCommand('set-env', { name }, convertedVal); } } exports.exportVariable = exportVariable; /** * Registers a secret which will get masked from logs * @param secret value of the secret */ function setSecret(secret) { command_1.issueCommand('add-mask', {}, secret); } exports.setSecret = setSecret; /** * Prepends inputPath to the PATH (for this action and future actions) * @param inputPath */ function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { file_command_1.issueCommand('PATH', inputPath); } else { command_1.issueCommand('add-path', {}, inputPath); } process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; } exports.addPath = addPath; /** * Gets the value of an input. The value is also trimmed. * * @param name name of the input to get * @param options optional. See InputOptions. * @returns string */ function getInput(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } return val.trim(); } exports.getInput = getInput; /** * Sets the value of an output. * * @param name name of the output to set * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { process.stdout.write(os.EOL); command_1.issueCommand('set-output', { name }, value); } exports.setOutput = setOutput; /** * Enables or disables the echoing of commands into stdout for the rest of the step. * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. * */ function setCommandEcho(enabled) { command_1.issue('echo', enabled ? 'on' : 'off'); } exports.setCommandEcho = setCommandEcho; //----------------------------------------------------------------------- // Results //----------------------------------------------------------------------- /** * Sets the action status to failed. * When the action exits it will be with an exit code of 1 * @param message add error issue message */ function setFailed(message) { process.exitCode = ExitCode.Failure; error(message); } exports.setFailed = setFailed; //----------------------------------------------------------------------- // Logging Commands //----------------------------------------------------------------------- /** * Gets whether Actions Step Debug is on or not */ function isDebug() { return process.env['RUNNER_DEBUG'] === '1'; } exports.isDebug = isDebug; /** * Writes debug message to user log * @param message debug message */ function debug(message) { command_1.issueCommand('debug', {}, message); } exports.debug = debug; /** * Adds an error issue * @param message error issue message. Errors will be converted to string via toString() */ function error(message) { command_1.issue('error', message instanceof Error ? message.toString() : message); } exports.error = error; /** * Adds an warning issue * @param message warning issue message. Errors will be converted to string via toString() */ function warning(message) { command_1.issue('warning', message instanceof Error ? message.toString() : message); } exports.warning = warning; /** * Writes info to log with console.log. * @param message info message */ function info(message) { process.stdout.write(message + os.EOL); } exports.info = info; /** * Begin an output group. * * Output until the next `groupEnd` will be foldable in this group * * @param name The name of the output group */ function startGroup(name) { command_1.issue('group', name); } exports.startGroup = startGroup; /** * End an output group. */ function endGroup() { command_1.issue('endgroup'); } exports.endGroup = endGroup; /** * Wrap an asynchronous function call in a group. * * Returns the same type as the function itself. * * @param name The name of the group * @param fn The function to wrap in the group */ function group(name, fn) { return __awaiter(this, void 0, void 0, function* () { startGroup(name); let result; try { result = yield fn(); } finally { endGroup(); } return result; }); } exports.group = group; //----------------------------------------------------------------------- // Wrapper action state //----------------------------------------------------------------------- /** * Saves state for current action, the state can only be retrieved by this action's post job execution. * * @param name name of the state to store * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { command_1.issueCommand('save-state', { name }, value); } exports.saveState = saveState; /** * Gets the value of an state set by this action's main execution. * * @param name name of the state to get * @returns string */ function getState(name) { return process.env[`STATE_${name}`] || ''; } exports.getState = getState; //# sourceMappingURL=core.js.map /***/ }), /***/ 717: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // For internal use, subject to change. var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(5747)); const os = __importStar(__nccwpck_require__(2087)); const utils_1 = __nccwpck_require__(5278); function issueCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } if (!fs.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { encoding: 'utf8' }); } exports.issueCommand = issueCommand; //# sourceMappingURL=file-command.js.map /***/ }), /***/ 5278: /***/ ((__unused_webpack_module, exports) => { "use strict"; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string */ function toCommandValue(input) { if (input === null || input === undefined) { return ''; } else if (typeof input === 'string' || input instanceof String) { return input; } return JSON.stringify(input); } exports.toCommandValue = toCommandValue; //# sourceMappingURL=utils.js.map /***/ }), /***/ 4087: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const fs_1 = __nccwpck_require__(5747); const os_1 = __nccwpck_require__(2087); class Context { /** * Hydrate the context from the environment */ constructor() { this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; this.sha = process.env.GITHUB_SHA; this.ref = process.env.GITHUB_REF; this.workflow = process.env.GITHUB_WORKFLOW; this.action = process.env.GITHUB_ACTION; this.actor = process.env.GITHUB_ACTOR; } get issue() { const payload = this.payload; return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } get repo() { if (process.env.GITHUB_REPOSITORY) { const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); return { owner, repo }; } if (this.payload.repository) { return { owner: this.payload.repository.owner.login, repo: this.payload.repository.name }; } throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } } exports.Context = Context; //# sourceMappingURL=context.js.map /***/ }), /***/ 5438: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); // Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts const graphql_1 = __nccwpck_require__(8467); const rest_1 = __nccwpck_require__(239); const Context = __importStar(__nccwpck_require__(4087)); const httpClient = __importStar(__nccwpck_require__(9925)); // We need this in order to extend Octokit rest_1.Octokit.prototype = new rest_1.Octokit(); exports.context = new Context.Context(); class GitHub extends rest_1.Octokit { constructor(token, opts) { super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts))); this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts)); } /** * Disambiguates the constructor overload parameters */ static disambiguate(token, opts) { return [ typeof token === 'string' ? token : '', typeof token === 'object' ? token : opts || {} ]; } static getOctokitOptions(args) { const token = args[0]; const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller // Base URL - GHES or Dotcom options.baseUrl = options.baseUrl || this.getApiBaseUrl(); // Auth const auth = GitHub.getAuthString(token, options); if (auth) { options.auth = auth; } // Proxy const agent = GitHub.getProxyAgent(options.baseUrl, options); if (agent) { // Shallow clone - don't mutate the object provided by the caller options.request = options.request ? Object.assign({}, options.request) : {}; // Set the agent options.request.agent = agent; } return options; } static getGraphQL(args) { const defaults = {}; defaults.baseUrl = this.getGraphQLBaseUrl(); const token = args[0]; const options = args[1]; // Authorization const auth = this.getAuthString(token, options); if (auth) { defaults.headers = { authorization: auth }; } // Proxy const agent = GitHub.getProxyAgent(defaults.baseUrl, options); if (agent) { defaults.request = { agent }; } return graphql_1.graphql.defaults(defaults); } static getAuthString(token, options) { // Validate args if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); } else if (token && options.auth) { throw new Error('Parameters token and opts.auth may not both be specified'); } return typeof options.auth === 'string' ? options.auth : `token ${token}`; } static getProxyAgent(destinationUrl, options) { var _a; if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) { if (httpClient.getProxyUrl(destinationUrl)) { const hc = new httpClient.HttpClient(); return hc.getAgent(destinationUrl); } } return undefined; } static getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } static getGraphQLBaseUrl() { let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql'; // Shouldn't be a trailing slash, but remove if so if (url.endsWith('/')) { url = url.substr(0, url.length - 1); } // Remove trailing "/graphql" if (url.toUpperCase().endsWith('/GRAPHQL')) { url = url.substr(0, url.length - '/graphql'.length); } return url; } } exports.GitHub = GitHub; //# sourceMappingURL=github.js.map /***/ }), /***/ 8945: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const VERSION = "1.1.2"; /** * Some “list” response that can be paginated have a different response structure * * They have a `total_count` key in the response (search also has `incomplete_results`, * /installation/repositories also has `repository_selection`), as well as a key with * the list of the items which name varies from endpoint to endpoint: * * - https://developer.github.com/v3/search/#example (key `items`) * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`) * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`) * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`) * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`) * * Octokit normalizes these responses so that paginated results are always returned following * the same structure. One challenge is that if the list response has only one page, no Link * header is provided, so this header alone is not sufficient to check wether a response is * paginated or not. For the exceptions with the namespace, a fallback check for the route * paths has to be added in order to normalize the response. We cannot check for the total_count * property because it also exists in the response of Get the combined status for a specific ref. */ const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/]; function normalizePaginatedListResponse(octokit, url, response) { const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, ""); const responseNeedsNormalization = REGEX.find(regex => regex.test(path)); if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way // to retrieve the same information. const incompleteResults = response.data.incomplete_results; const repositorySelection = response.data.repository_selection; const totalCount = response.data.total_count; delete response.data.incomplete_results; delete response.data.repository_selection; delete response.data.total_count; const namespaceKey = Object.keys(response.data)[0]; const data = response.data[namespaceKey]; response.data = data; if (typeof incompleteResults !== "undefined") { response.data.incomplete_results = incompleteResults; } if (typeof repositorySelection !== "undefined") { response.data.repository_selection = repositorySelection; } response.data.total_count = totalCount; Object.defineProperty(response.data, namespaceKey, { get() { octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`); return Array.from(data); } }); } function iterator(octokit, route, parameters) { const options = octokit.request.endpoint(route, parameters); const method = options.method; const headers = options.headers; let url = options.url; return { [Symbol.asyncIterator]: () => ({ next() { if (!url) { return Promise.resolve({ done: true }); } return octokit.request({ method, url, headers }).then(response => { normalizePaginatedListResponse(octokit, url, response); // `response.headers.link` format: // '; rel="next", ; rel="last"' // sets `url` to undefined if "next" URL is not present or `link` header is not set url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; return { value: response }; }); } }) }; } function paginate(octokit, route, parameters, mapFn) { if (typeof parameters === "function") { mapFn = parameters; parameters = undefined; } return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); } function gather(octokit, results, iterator, mapFn) { return iterator.next().then(result => { if (result.done) { return results; } let earlyExit = false; function done() { earlyExit = true; } results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); if (earlyExit) { return results; } return gather(octokit, results, iterator, mapFn); }); } /** * @param octokit Octokit instance * @param options Options passed to Octokit constructor */ function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { iterator: iterator.bind(null, octokit) }) }; } paginateRest.VERSION = VERSION; exports.paginateRest = paginateRest; //# sourceMappingURL=index.js.map /***/ }), /***/ 7471: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var deprecation = __nccwpck_require__(8932); var once = _interopDefault(__nccwpck_require__(1223)); const logOnce = once(deprecation => console.warn(deprecation)); /** * Error with extra properties to help with debugging */ class RequestError extends Error { constructor(message, statusCode, options) { super(message); // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = "HttpError"; this.status = statusCode; Object.defineProperty(this, "code", { get() { logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); return statusCode; } }); this.headers = options.headers || {}; // redact request credentials without mutating original request options const requestCopy = Object.assign({}, options.request); if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") }); } requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); this.request = requestCopy; } } exports.RequestError = RequestError; //# sourceMappingURL=index.js.map /***/ }), /***/ 239: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const { requestLog } = __nccwpck_require__(8883); const { restEndpointMethods } = __nccwpck_require__(3044); const Core = __nccwpck_require__(2348); const CORE_PLUGINS = [ __nccwpck_require__(2796), __nccwpck_require__(7662), // deprecated: remove in v17 requestLog, __nccwpck_require__(5155), restEndpointMethods, __nccwpck_require__(7291), __nccwpck_require__(2072) // deprecated: remove in v17 ]; const OctokitRest = Core.plugin(CORE_PLUGINS); function DeprecatedOctokit(options) { const warn = options && options.log && options.log.warn ? options.log.warn : console.warn; warn( '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. Use `const { Octokit } = require("@octokit/rest")` instead' ); return new OctokitRest(options); } const Octokit = Object.assign(DeprecatedOctokit, { Octokit: OctokitRest }); Object.keys(OctokitRest).forEach(key => { /* istanbul ignore else */ if (OctokitRest.hasOwnProperty(key)) { Octokit[key] = OctokitRest[key]; } }); module.exports = Octokit; /***/ }), /***/ 7535: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = Octokit; const { request } = __nccwpck_require__(6234); const Hook = __nccwpck_require__(3682); const parseClientOptions = __nccwpck_require__(6184); function Octokit(plugins, options) { options = options || {}; const hook = new Hook.Collection(); const log = Object.assign( { debug: () => {}, info: () => {}, warn: console.warn, error: console.error }, options && options.log ); const api = { hook, log, request: request.defaults(parseClientOptions(options, log, hook)) }; plugins.forEach(pluginFunction => pluginFunction(api, options)); return api; } /***/ }), /***/ 2348: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const factory = __nccwpck_require__(7692); module.exports = factory(); /***/ }), /***/ 7692: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = factory; const Octokit = __nccwpck_require__(7535); const registerPlugin = __nccwpck_require__(9927); function factory(plugins) { const Api = Octokit.bind(null, plugins || []); Api.plugin = registerPlugin.bind(null, plugins || []); return Api; } /***/ }), /***/ 6184: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = parseOptions; const { Deprecation } = __nccwpck_require__(8932); const { getUserAgent } = __nccwpck_require__(129); const once = __nccwpck_require__(1223); const pkg = __nccwpck_require__(913); const deprecateOptionsTimeout = once((log, deprecation) => log.warn(deprecation) ); const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation)); const deprecateOptionsHeaders = once((log, deprecation) => log.warn(deprecation) ); function parseOptions(options, log, hook) { if (options.headers) { options.headers = Object.keys(options.headers).reduce((newObj, key) => { newObj[key.toLowerCase()] = options.headers[key]; return newObj; }, {}); } const clientDefaults = { headers: options.headers || {}, request: options.request || {}, mediaType: { previews: [], format: "" } }; if (options.baseUrl) { clientDefaults.baseUrl = options.baseUrl; } if (options.userAgent) { clientDefaults.headers["user-agent"] = options.userAgent; } if (options.previews) { clientDefaults.mediaType.previews = options.previews; } if (options.timeZone) { clientDefaults.headers["time-zone"] = options.timeZone; } if (options.timeout) { deprecateOptionsTimeout( log, new Deprecation( "[@octokit/rest] new Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request" ) ); clientDefaults.request.timeout = options.timeout; } if (options.agent) { deprecateOptionsAgent( log, new Deprecation( "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request" ) ); clientDefaults.request.agent = options.agent; } if (options.headers) { deprecateOptionsHeaders( log, new Deprecation( "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. See https://github.com/octokit/request.js#request" ) ); } const userAgentOption = clientDefaults.headers["user-agent"]; const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`; clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent] .filter(Boolean) .join(" "); clientDefaults.request.hook = hook.bind(null, "request"); return clientDefaults; } /***/ }), /***/ 9927: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = registerPlugin; const factory = __nccwpck_require__(7692); function registerPlugin(plugins, pluginFunction) { return factory( plugins.includes(pluginFunction) ? plugins : plugins.concat(pluginFunction) ); } /***/ }), /***/ 3086: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticate; const { Deprecation } = __nccwpck_require__(8932); const once = __nccwpck_require__(1223); const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); function authenticate(state, options) { deprecateAuthenticate( state.octokit.log, new Deprecation( '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.' ) ); if (!options) { state.auth = false; return; } switch (options.type) { case "basic": if (!options.username || !options.password) { throw new Error( "Basic authentication requires both a username and password to be set" ); } break; case "oauth": if (!options.token && !(options.key && options.secret)) { throw new Error( "OAuth2 authentication requires a token or key & secret to be set" ); } break; case "token": case "app": if (!options.token) { throw new Error("Token authentication requires a token to be set"); } break; default: throw new Error( "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'" ); } state.auth = options; } /***/ }), /***/ 6888: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticationBeforeRequest; const btoa = __nccwpck_require__(2358); const uniq = __nccwpck_require__(8216); function authenticationBeforeRequest(state, options) { if (!state.auth.type) { return; } if (state.auth.type === "basic") { const hash = btoa(`${state.auth.username}:${state.auth.password}`); options.headers.authorization = `Basic ${hash}`; return; } if (state.auth.type === "token") { options.headers.authorization = `token ${state.auth.token}`; return; } if (state.auth.type === "app") { options.headers.authorization = `Bearer ${state.auth.token}`; const acceptHeaders = options.headers.accept .split(",") .concat("application/vnd.github.machine-man-preview+json"); options.headers.accept = uniq(acceptHeaders) .filter(Boolean) .join(","); return; } options.url += options.url.indexOf("?") === -1 ? "?" : "&"; if (state.auth.token) { options.url += `access_token=${encodeURIComponent(state.auth.token)}`; return; } const key = encodeURIComponent(state.auth.key); const secret = encodeURIComponent(state.auth.secret); options.url += `client_id=${key}&client_secret=${secret}`; } /***/ }), /***/ 7662: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticationPlugin; const { Deprecation } = __nccwpck_require__(8932); const once = __nccwpck_require__(1223); const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); const authenticate = __nccwpck_require__(3086); const beforeRequest = __nccwpck_require__(6888); const requestError = __nccwpck_require__(2532); function authenticationPlugin(octokit, options) { if (options.auth) { octokit.authenticate = () => { deprecateAuthenticate( octokit.log, new Deprecation( '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor' ) ); }; return; } const state = { octokit, auth: false }; octokit.authenticate = authenticate.bind(null, state); octokit.hook.before("request", beforeRequest.bind(null, state)); octokit.hook.error("request", requestError.bind(null, state)); } /***/ }), /***/ 2532: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticationRequestError; const { RequestError } = __nccwpck_require__(7471); function authenticationRequestError(state, error, options) { /* istanbul ignore next */ if (!error.headers) throw error; const otpRequired = /required/.test(error.headers["x-github-otp"] || ""); // handle "2FA required" error only if (error.status !== 401 || !otpRequired) { throw error; } if ( error.status === 401 && otpRequired && error.request && error.request.headers["x-github-otp"] ) { throw new RequestError( "Invalid one-time password for two-factor authentication", 401, { headers: error.headers, request: options } ); } if (typeof state.auth.on2fa !== "function") { throw new RequestError( "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication", 401, { headers: error.headers, request: options } ); } return Promise.resolve() .then(() => { return state.auth.on2fa(); }) .then(oneTimePassword => { const newOptions = Object.assign(options, { headers: Object.assign( { "x-github-otp": oneTimePassword }, options.headers ) }); return state.octokit.request(newOptions); }); } /***/ }), /***/ 4729: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticationBeforeRequest; const btoa = __nccwpck_require__(2358); const withAuthorizationPrefix = __nccwpck_require__(2885); function authenticationBeforeRequest(state, options) { if (typeof state.auth === "string") { options.headers.authorization = withAuthorizationPrefix(state.auth); return; } if (state.auth.username) { const hash = btoa(`${state.auth.username}:${state.auth.password}`); options.headers.authorization = `Basic ${hash}`; if (state.otp) { options.headers["x-github-otp"] = state.otp; } return; } if (state.auth.clientId) { // There is a special case for OAuth applications, when `clientId` and `clientSecret` is passed as // Basic Authorization instead of query parameters. The only routes where that applies share the same // URL though: `/applications/:client_id/tokens/:access_token`. // // 1. [Check an authorization](https://developer.github.com/v3/oauth_authorizations/#check-an-authorization) // 2. [Reset an authorization](https://developer.github.com/v3/oauth_authorizations/#reset-an-authorization) // 3. [Revoke an authorization for an application](https://developer.github.com/v3/oauth_authorizations/#revoke-an-authorization-for-an-application) // // We identify by checking the URL. It must merge both "/applications/:client_id/tokens/:access_token" // as well as "/applications/123/tokens/token456" if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) { const hash = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`); options.headers.authorization = `Basic ${hash}`; return; } options.url += options.url.indexOf("?") === -1 ? "?" : "&"; options.url += `client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`; return; } return Promise.resolve() .then(() => { return state.auth(); }) .then(authorization => { options.headers.authorization = withAuthorizationPrefix(authorization); }); } /***/ }), /***/ 2796: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticationPlugin; const { createTokenAuth } = __nccwpck_require__(334); const { Deprecation } = __nccwpck_require__(8932); const once = __nccwpck_require__(1223); const beforeRequest = __nccwpck_require__(4729); const requestError = __nccwpck_require__(1983); const validate = __nccwpck_require__(8326); const withAuthorizationPrefix = __nccwpck_require__(2885); const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation)); const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation)); function authenticationPlugin(octokit, options) { // If `options.authStrategy` is set then use it and pass in `options.auth` if (options.authStrategy) { const auth = options.authStrategy(options.auth); octokit.hook.wrap("request", auth.hook); octokit.auth = auth; return; } // If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance // is unauthenticated. The `octokit.auth()` method is a no-op and no request hook is registred. if (!options.auth) { octokit.auth = () => Promise.resolve({ type: "unauthenticated" }); return; } const isBasicAuthString = typeof options.auth === "string" && /^basic/.test(withAuthorizationPrefix(options.auth)); // If only `options.auth` is set to a string, use the default token authentication strategy. if (typeof options.auth === "string" && !isBasicAuthString) { const auth = createTokenAuth(options.auth); octokit.hook.wrap("request", auth.hook); octokit.auth = auth; return; } // Otherwise log a deprecation message const [deprecationMethod, deprecationMessapge] = isBasicAuthString ? [ deprecateAuthBasic, 'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)' ] : [ deprecateAuthObject, 'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. See (https://octokit.github.io/rest.js/#authentication)' ]; deprecationMethod( octokit.log, new Deprecation("[@octokit/rest] " + deprecationMessapge) ); octokit.auth = () => Promise.resolve({ type: "deprecated", message: deprecationMessapge }); validate(options.auth); const state = { octokit, auth: options.auth }; octokit.hook.before("request", beforeRequest.bind(null, state)); octokit.hook.error("request", requestError.bind(null, state)); } /***/ }), /***/ 1983: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = authenticationRequestError; const { RequestError } = __nccwpck_require__(7471); function authenticationRequestError(state, error, options) { if (!error.headers) throw error; const otpRequired = /required/.test(error.headers["x-github-otp"] || ""); // handle "2FA required" error only if (error.status !== 401 || !otpRequired) { throw error; } if ( error.status === 401 && otpRequired && error.request && error.request.headers["x-github-otp"] ) { if (state.otp) { delete state.otp; // no longer valid, request again } else { throw new RequestError( "Invalid one-time password for two-factor authentication", 401, { headers: error.headers, request: options } ); } } if (typeof state.auth.on2fa !== "function") { throw new RequestError( "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication", 401, { headers: error.headers, request: options } ); } return Promise.resolve() .then(() => { return state.auth.on2fa(); }) .then(oneTimePassword => { const newOptions = Object.assign(options, { headers: Object.assign(options.headers, { "x-github-otp": oneTimePassword }) }); return state.octokit.request(newOptions).then(response => { // If OTP still valid, then persist it for following requests state.otp = oneTimePassword; return response; }); }); } /***/ }), /***/ 8326: /***/ ((module) => { module.exports = validateAuth; function validateAuth(auth) { if (typeof auth === "string") { return; } if (typeof auth === "function") { return; } if (auth.username && auth.password) { return; } if (auth.clientId && auth.clientSecret) { return; } throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`); } /***/ }), /***/ 2885: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = withAuthorizationPrefix; const atob = __nccwpck_require__(5224); const REGEX_IS_BASIC_AUTH = /^[\w-]+:/; function withAuthorizationPrefix(authorization) { if (/^(basic|bearer|token) /i.test(authorization)) { return authorization; } try { if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) { return `basic ${authorization}`; } } catch (error) {} if (authorization.split(/\./).length === 3) { return `bearer ${authorization}`; } return `token ${authorization}`; } /***/ }), /***/ 5155: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = paginatePlugin; const { paginateRest } = __nccwpck_require__(8945); function paginatePlugin(octokit) { Object.assign(octokit, paginateRest(octokit)); } /***/ }), /***/ 7291: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = octokitValidate; const validate = __nccwpck_require__(9324); function octokitValidate(octokit) { octokit.hook.before("request", validate.bind(null, octokit)); } /***/ }), /***/ 9324: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = validate; const { RequestError } = __nccwpck_require__(7471); const get = __nccwpck_require__(9197); const set = __nccwpck_require__(1552); function validate(octokit, options) { if (!options.request.validate) { return; } const { validate: params } = options.request; Object.keys(params).forEach(parameterName => { const parameter = get(params, parameterName); const expectedType = parameter.type; let parentParameterName; let parentValue; let parentParamIsPresent = true; let parentParameterIsArray = false; if (/\./.test(parameterName)) { parentParameterName = parameterName.replace(/\.[^.]+$/, ""); parentParameterIsArray = parentParameterName.slice(-2) === "[]"; if (parentParameterIsArray) { parentParameterName = parentParameterName.slice(0, -2); } parentValue = get(options, parentParameterName); parentParamIsPresent = parentParameterName === "headers" || (typeof parentValue === "object" && parentValue !== null); } const values = parentParameterIsArray ? (get(options, parentParameterName) || []).map( value => value[parameterName.split(/\./).pop()] ) : [get(options, parameterName)]; values.forEach((value, i) => { const valueIsPresent = typeof value !== "undefined"; const valueIsNull = value === null; const currentParameterName = parentParameterIsArray ? parameterName.replace(/\[\]/, `[${i}]`) : parameterName; if (!parameter.required && !valueIsPresent) { return; } // if the parent parameter is of type object but allows null // then the child parameters can be ignored if (!parentParamIsPresent) { return; } if (parameter.allowNull && valueIsNull) { return; } if (!parameter.allowNull && valueIsNull) { throw new RequestError( `'${currentParameterName}' cannot be null`, 400, { request: options } ); } if (parameter.required && !valueIsPresent) { throw new RequestError( `Empty value for parameter '${currentParameterName}': ${JSON.stringify( value )}`, 400, { request: options } ); } // parse to integer before checking for enum // so that string "1" will match enum with number 1 if (expectedType === "integer") { const unparsedValue = value; value = parseInt(value, 10); if (isNaN(value)) { throw new RequestError( `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( unparsedValue )} is NaN`, 400, { request: options } ); } } if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) { throw new RequestError( `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( value )}`, 400, { request: options } ); } if (parameter.validation) { const regex = new RegExp(parameter.validation); if (!regex.test(value)) { throw new RequestError( `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( value )}`, 400, { request: options } ); } } if (expectedType === "object" && typeof value === "string") { try { value = JSON.parse(value); } catch (exception) { throw new RequestError( `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify( value )}`, 400, { request: options } ); } } set(options, parameter.mapTo || currentParameterName, value); }); }); return options; } /***/ }), /***/ 129: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var osName = _interopDefault(__nccwpck_require__(4824)); function getUserAgent() { try { return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; } catch (error) { if (/wmic os get Caption/.test(error.message)) { return "Windows "; } throw error; } } exports.getUserAgent = getUserAgent; //# sourceMappingURL=index.js.map /***/ }), /***/ 9925: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const http = __nccwpck_require__(8605); const https = __nccwpck_require__(7211); const pm = __nccwpck_require__(6443); let tunnel; var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; })(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; })(Headers = exports.Headers || (exports.Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; })(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; const HttpRedirectCodes = [ HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect ]; const HttpResponseRetryCodes = [ HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout ]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; class HttpClientError extends Error { constructor(message, statusCode) { super(message); this.name = 'HttpClientError'; this.statusCode = statusCode; Object.setPrototypeOf(this, HttpClientError.prototype); } } exports.HttpClientError = HttpClientError; class HttpClientResponse { constructor(message) { this.message = message; } readBody() { return new Promise(async (resolve, reject) => { let output = Buffer.alloc(0); this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); this.message.on('end', () => { resolve(output.toString()); }); }); } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { let parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; class HttpClient { constructor(userAgent, handlers, requestOptions) { this._ignoreSslError = false; this._allowRedirects = true; this._allowRedirectDowngrade = false; this._maxRedirects = 50; this._allowRetries = false; this._maxRetries = 1; this._keepAlive = false; this._disposed = false; this.userAgent = userAgent; this.handlers = handlers || []; this.requestOptions = requestOptions; if (requestOptions) { if (requestOptions.ignoreSslError != null) { this._ignoreSslError = requestOptions.ignoreSslError; } this._socketTimeout = requestOptions.socketTimeout; if (requestOptions.allowRedirects != null) { this._allowRedirects = requestOptions.allowRedirects; } if (requestOptions.allowRedirectDowngrade != null) { this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; } if (requestOptions.maxRedirects != null) { this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); } if (requestOptions.keepAlive != null) { this._keepAlive = requestOptions.keepAlive; } if (requestOptions.allowRetries != null) { this._allowRetries = requestOptions.allowRetries; } if (requestOptions.maxRetries != null) { this._maxRetries = requestOptions.maxRetries; } } } options(requestUrl, additionalHeaders) { return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); } get(requestUrl, additionalHeaders) { return this.request('GET', requestUrl, null, additionalHeaders || {}); } del(requestUrl, additionalHeaders) { return this.request('DELETE', requestUrl, null, additionalHeaders || {}); } post(requestUrl, data, additionalHeaders) { return this.request('POST', requestUrl, data, additionalHeaders || {}); } patch(requestUrl, data, additionalHeaders) { return this.request('PATCH', requestUrl, data, additionalHeaders || {}); } put(requestUrl, data, additionalHeaders) { return this.request('PUT', requestUrl, data, additionalHeaders || {}); } head(requestUrl, additionalHeaders) { return this.request('HEAD', requestUrl, null, additionalHeaders || {}); } sendStream(verb, requestUrl, stream, additionalHeaders) { return this.request(verb, requestUrl, stream, additionalHeaders); } /** * Gets a typed object from an endpoint * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise */ async getJson(requestUrl, additionalHeaders = {}) { additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); let res = await this.get(requestUrl, additionalHeaders); return this._processResponse(res, this.requestOptions); } async postJson(requestUrl, obj, additionalHeaders = {}) { let data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); let res = await this.post(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); } async putJson(requestUrl, obj, additionalHeaders = {}) { let data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); let res = await this.put(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); } async patchJson(requestUrl, obj, additionalHeaders = {}) { let data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); let res = await this.patch(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); } /** * Makes a raw http request. * All other methods such as get, post, patch, and request ultimately call this. * Prefer get, del, post and patch */ async request(verb, requestUrl, data, headers) { if (this._disposed) { throw new Error('Client has already been disposed.'); } let parsedUrl = new URL(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 ? this._maxRetries + 1 : 1; let numTries = 0; let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); // Check if it's an authentication challenge if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (let i = 0; i < this.handlers.length; i++) { if (this.handlers[i].canHandleAuthentication(response)) { authenticationHandler = this.handlers[i]; break; } } if (authenticationHandler) { return authenticationHandler.handleAuthentication(this, info, data); } else { // We have received an unauthorized response but have no handlers to handle it. // Let the response return to the caller. return response; } } let redirectsRemaining = this._maxRedirects; while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && this._allowRedirects && redirectsRemaining > 0) { const redirectUrl = response.message.headers['location']; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } let parsedRedirectUrl = new URL(redirectUrl); if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); } // we need to finish reading the response before reassigning response // which will leak the open socket. await response.readBody(); // strip authorization header if redirected to a different hostname if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { for (let header in headers) { // header names are case insensitive if (header.toLowerCase() === 'authorization') { delete headers[header]; } } } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = await this.requestRaw(info, data); redirectsRemaining--; } if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { // If not a retry code, return immediately instead of retrying return response; } numTries += 1; if (numTries < maxTries) { await response.readBody(); await this._performExponentialBackoff(numTries); } } return response; } /** * Needs to be called if keepAlive is set to true in request options. */ dispose() { if (this._agent) { this._agent.destroy(); } this._disposed = true; } /** * Raw request. * @param info * @param data */ requestRaw(info, data) { return new Promise((resolve, reject) => { let callbackForResult = function (err, res) { if (err) { reject(err); } resolve(res); }; this.requestRawWithCallback(info, data, callbackForResult); }); } /** * Raw request with callback. * @param info * @param data * @param onResult */ requestRawWithCallback(info, data, onResult) { let socket; if (typeof data === 'string') { info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; let handleResult = (err, res) => { if (!callbackCalled) { callbackCalled = true; onResult(err, res); } }; let req = info.httpModule.request(info.options, (msg) => { let res = new HttpClientResponse(msg); handleResult(null, res); }); req.on('socket', sock => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually req.setTimeout(this._socketTimeout || 3 * 60000, () => { if (socket) { socket.end(); } handleResult(new Error('Request timeout: ' + info.options.path), null); }); req.on('error', function (err) { // err has statusCode property // res should have headers handleResult(err, null); }); if (data && typeof data === 'string') { req.write(data, 'utf8'); } if (data && typeof data !== 'string') { data.on('close', function () { req.end(); }); data.pipe(req); } else { req.end(); } } /** * Gets an http agent. This function is useful when you need an http agent that handles * routing through a proxy server - depending upon the url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { let parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; const usingSsl = info.parsedUrl.protocol === 'https:'; info.httpModule = usingSsl ? https : http; const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort; info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { info.options.headers['user-agent'] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { this.handlers.forEach(handler => { handler.prepareRequest(info.options); }); } return info; } _mergeHeaders(headers) { const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; } return additionalHeaders[header] || clientHeader || _default; } _getAgent(parsedUrl) { let agent; let proxyUrl = pm.getProxyUrl(parsedUrl); let useProxy = proxyUrl && proxyUrl.hostname; if (this._keepAlive && useProxy) { agent = this._proxyAgent; } if (this._keepAlive && !useProxy) { agent = this._agent; } // if agent is already assigned use that agent. if (!!agent) { return agent; } const usingSsl = parsedUrl.protocol === 'https:'; let maxSockets = 100; if (!!this.requestOptions) { maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } if (useProxy) { // If using proxy, need tunnel if (!tunnel) { tunnel = __nccwpck_require__(4294); } const agentOptions = { maxSockets: maxSockets, keepAlive: this._keepAlive, proxy: { proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, host: proxyUrl.hostname, port: proxyUrl.port } }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; if (usingSsl) { tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; } else { tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; } agent = tunnelAgent(agentOptions); this._proxyAgent = agent; } // if reusing agent across request and tunneling agent isn't assigned create a new agent if (this._keepAlive && !agent) { const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } // if not using private agent and tunnel agent isn't setup then use global agent if (!agent) { agent = usingSsl ? https.globalAgent : http.globalAgent; } if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); } return agent; } _performExponentialBackoff(retryNumber) { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); return new Promise(resolve => setTimeout(() => resolve(), ms)); } static dateTimeDeserializer(key, value) { if (typeof value === 'string') { let a = new Date(value); if (!isNaN(a.valueOf())) { return a; } } return value; } async _processResponse(res, options) { return new Promise(async (resolve, reject) => { const statusCode = res.message.statusCode; const response = { statusCode: statusCode, result: null, headers: {} }; // not found leads to null obj returned if (statusCode == HttpCodes.NotFound) { resolve(response); } let obj; let contents; // get the result from the body try { contents = await res.readBody(); if (contents && contents.length > 0) { if (options && options.deserializeDates) { obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); } else { obj = JSON.parse(contents); } response.result = obj; } response.headers = res.message.headers; } catch (err) { // Invalid resource (contents not json); leaving result obj null } // note that 3xx redirects are handled by the http layer. if (statusCode > 299) { let msg; // if exception/error in body, attempt to get better error if (obj && obj.message) { msg = obj.message; } else if (contents && contents.length > 0) { // it may be the case that the exception is in the body message as string msg = contents; } else { msg = 'Failed request: (' + statusCode + ')'; } let err = new HttpClientError(msg, statusCode); err.result = response.result; reject(err); } else { resolve(response); } }); } } exports.HttpClient = HttpClient; /***/ }), /***/ 6443: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function getProxyUrl(reqUrl) { let usingSsl = reqUrl.protocol === 'https:'; let proxyUrl; if (checkBypass(reqUrl)) { return proxyUrl; } let proxyVar; if (usingSsl) { proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; } else { proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; } if (proxyVar) { proxyUrl = new URL(proxyVar); } return proxyUrl; } exports.getProxyUrl = getProxyUrl; function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } // Determine the request port let reqPort; if (reqUrl.port) { reqPort = Number(reqUrl.port); } else if (reqUrl.protocol === 'http:') { reqPort = 80; } else if (reqUrl.protocol === 'https:') { reqPort = 443; } // Format the request hostname and hostname with port let upperReqHosts = [reqUrl.hostname.toUpperCase()]; if (typeof reqPort === 'number') { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy for (let upperNoProxyItem of noProxy .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { if (upperReqHosts.some(x => x === upperNoProxyItem)) { return true; } } return false; } exports.checkBypass = checkBypass; /***/ }), /***/ 334: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); async function auth(token) { const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; return { type: "token", token: token, tokenType }; } /** * Prefix token for usage in the Authorization header * * @param token OAuth token or JSON Web Token */ function withAuthorizationPrefix(token) { if (token.split(/\./).length === 3) { return `bearer ${token}`; } return `token ${token}`; } async function hook(token, request, route, parameters) { const endpoint = request.endpoint.merge(route, parameters); endpoint.headers.authorization = withAuthorizationPrefix(token); return request(endpoint); } const createTokenAuth = function createTokenAuth(token) { if (!token) { throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } if (typeof token !== "string") { throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); } token = token.replace(/^(token|bearer) +/i, ""); return Object.assign(auth.bind(null, token), { hook: hook.bind(null, token) }); }; exports.createTokenAuth = createTokenAuth; //# sourceMappingURL=index.js.map /***/ }), /***/ 9440: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); var isPlainObject = __nccwpck_require__(3287); var universalUserAgent = __nccwpck_require__(5030); function lowercaseKeys(object) { if (!object) { return {}; } return Object.keys(object).reduce((newObj, key) => { newObj[key.toLowerCase()] = object[key]; return newObj; }, {}); } function mergeDeep(defaults, options) { const result = Object.assign({}, defaults); Object.keys(options).forEach(key => { if (isPlainObject.isPlainObject(options[key])) { if (!(key in defaults)) Object.assign(result, { [key]: options[key] });else result[key] = mergeDeep(defaults[key], options[key]); } else { Object.assign(result, { [key]: options[key] }); } }); return result; } function removeUndefinedProperties(obj) { for (const key in obj) { if (obj[key] === undefined) { delete obj[key]; } } return obj; } function merge(defaults, route, options) { if (typeof route === "string") { let [method, url] = route.split(" "); options = Object.assign(url ? { method, url } : { url: method }, options); } else { options = Object.assign({}, route); } // lowercase header names before merging with defaults to avoid duplicates options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging removeUndefinedProperties(options); removeUndefinedProperties(options.headers); const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten if (defaults && defaults.mediaType.previews.length) { mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); } mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); return mergedOptions; } function addQueryParameters(url, parameters) { const separator = /\?/.test(url) ? "&" : "?"; const names = Object.keys(parameters); if (names.length === 0) { return url; } return url + separator + names.map(name => { if (name === "q") { return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } return `${name}=${encodeURIComponent(parameters[name])}`; }).join("&"); } const urlVariableRegex = /\{[^}]+\}/g; function removeNonChars(variableName) { return variableName.replace(/^\W+|\W+$/g, "").split(/,/); } function extractUrlVariableNames(url) { const matches = url.match(urlVariableRegex); if (!matches) { return []; } return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } function omit(object, keysToOmit) { return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { obj[key] = object[key]; return obj; }, {}); } // Based on https://github.com/bramstein/url-template, licensed under BSD // TODO: create separate package. // // Copyright (c) 2012-2014, Bram Stein // All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. The name of the author may not be used to endorse or promote products // derived from this software without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED // WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO // EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /* istanbul ignore file */ function encodeReserved(str) { return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { if (!/%[0-9A-Fa-f]/.test(part)) { part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } return part; }).join(""); } function encodeUnreserved(str) { return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { return "%" + c.charCodeAt(0).toString(16).toUpperCase(); }); } function encodeValue(operator, value, key) { value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); if (key) { return encodeUnreserved(key) + "=" + value; } else { return value; } } function isDefined(value) { return value !== undefined && value !== null; } function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } function getValues(context, operator, key, modifier) { var value = context[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); if (modifier && modifier !== "*") { value = value.substring(0, parseInt(modifier, 10)); } result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); } else { if (modifier === "*") { if (Array.isArray(value)) { value.filter(isDefined).forEach(function (value) { result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); }); } else { Object.keys(value).forEach(function (k) { if (isDefined(value[k])) { result.push(encodeValue(operator, value[k], k)); } }); } } else { const tmp = []; if (Array.isArray(value)) { value.filter(isDefined).forEach(function (value) { tmp.push(encodeValue(operator, value)); }); } else { Object.keys(value).forEach(function (k) { if (isDefined(value[k])) { tmp.push(encodeUnreserved(k)); tmp.push(encodeValue(operator, value[k].toString())); } }); } if (isKeyOperator(operator)) { result.push(encodeUnreserved(key) + "=" + tmp.join(",")); } else if (tmp.length !== 0) { result.push(tmp.join(",")); } } } } else { if (operator === ";") { if (isDefined(value)) { result.push(encodeUnreserved(key)); } } else if (value === "" && (operator === "&" || operator === "?")) { result.push(encodeUnreserved(key) + "="); } else if (value === "") { result.push(""); } } return result; } function parseUrl(template) { return { expand: expand.bind(null, template) }; } function expand(template, context) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { if (expression) { let operator = ""; const values = []; if (operators.indexOf(expression.charAt(0)) !== -1) { operator = expression.charAt(0); expression = expression.substr(1); } expression.split(/,/g).forEach(function (variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; if (operator === "?") { separator = "&"; } else if (operator !== "#") { separator = operator; } return (values.length !== 0 ? operator : "") + values.join(separator); } else { return values.join(","); } } else { return encodeReserved(literal); } }); } function parse(options) { // https://fetch.spec.whatwg.org/#methods let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); let headers = Object.assign({}, options.headers); let body; let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later const urlVariableNames = extractUrlVariableNames(url); url = parseUrl(url).expand(parameters); if (!/^http/.test(url)) { url = options.baseUrl + url; } const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); const remainingParameters = omit(parameters, omittedParameters); const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); if (!isBinaryRequest) { if (options.mediaType.format) { // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); } if (options.mediaType.previews.length) { const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; return `application/vnd.github.${preview}-preview${format}`; }).join(","); } } // for GET/HEAD requests, set URL query parameters from remaining parameters // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters if (["GET", "HEAD"].includes(method)) { url = addQueryParameters(url, remainingParameters); } else { if ("data" in remainingParameters) { body = remainingParameters.data; } else { if (Object.keys(remainingParameters).length) { body = remainingParameters; } else { headers["content-length"] = 0; } } } // default content-type for JSON if body is set if (!headers["content-type"] && typeof body !== "undefined") { headers["content-type"] = "application/json; charset=utf-8"; } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. // fetch does not allow to set `content-length` header, but we can set body to an empty string if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { body = ""; } // Only return body/request keys if present return Object.assign({ method, url, headers }, typeof body !== "undefined" ? { body } : null, options.request ? { request: options.request } : null); } function endpointWithDefaults(defaults, route, options) { return parse(merge(defaults, route, options)); } function withDefaults(oldDefaults, newDefaults) { const DEFAULTS = merge(oldDefaults, newDefaults); const endpoint = endpointWithDefaults.bind(null, DEFAULTS); return Object.assign(endpoint, { DEFAULTS, defaults: withDefaults.bind(null, DEFAULTS), merge: merge.bind(null, DEFAULTS), parse }); } const VERSION = "6.0.10"; const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. // So we use RequestParameters and add method as additional required property. const DEFAULTS = { method: "GET", baseUrl: "https://api.github.com", headers: { accept: "application/vnd.github.v3+json", "user-agent": userAgent }, mediaType: { format: "", previews: [] } }; const endpoint = withDefaults(null, DEFAULTS); exports.endpoint = endpoint; //# sourceMappingURL=index.js.map /***/ }), /***/ 8467: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); var request = __nccwpck_require__(6234); var universalUserAgent = __nccwpck_require__(5030); const VERSION = "4.5.8"; class GraphqlError extends Error { constructor(request, response) { const message = response.data.errors[0].message; super(message); Object.assign(this, response.data); Object.assign(this, { headers: response.headers }); this.name = "GraphqlError"; this.request = request; // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } } } const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; function graphql(request, query, options) { if (typeof query === "string" && options && "query" in options) { return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); } const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { if (NON_VARIABLE_OPTIONS.includes(key)) { result[key] = parsedOptions[key]; return result; } if (!result.variables) { result.variables = {}; } result.variables[key] = parsedOptions[key]; return result; }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } return request(requestOptions).then(response => { if (response.data.errors) { const headers = {}; for (const key of Object.keys(response.headers)) { headers[key] = response.headers[key]; } throw new GraphqlError(requestOptions, { headers, data: response.data }); } return response.data.data; }); } function withDefaults(request$1, newDefaults) { const newRequest = request$1.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; return Object.assign(newApi, { defaults: withDefaults.bind(null, newRequest), endpoint: request.request.endpoint }); } const graphql$1 = withDefaults(request.request, { headers: { "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` }, method: "POST", url: "/graphql" }); function withCustomRequest(customRequest) { return withDefaults(customRequest, { method: "POST", url: "/graphql" }); } exports.graphql = graphql$1; exports.withCustomRequest = withCustomRequest; //# sourceMappingURL=index.js.map /***/ }), /***/ 8883: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const VERSION = "1.0.2"; /** * @param octokit Octokit instance * @param options Options passed to Octokit constructor */ function requestLog(octokit) { octokit.hook.wrap("request", (request, options) => { octokit.log.debug("request", options); const start = Date.now(); const requestOptions = octokit.request.endpoint.parse(options); const path = requestOptions.url.replace(options.baseUrl, ""); return request(options).then(response => { octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); return response; }).catch(error => { octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`); throw error; }); }); } requestLog.VERSION = VERSION; exports.requestLog = requestLog; //# sourceMappingURL=index.js.map /***/ }), /***/ 3044: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); var deprecation = __nccwpck_require__(8932); var endpointsByScope = { actions: { cancelWorkflowRun: { method: "POST", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, run_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runs/:run_id/cancel" }, createOrUpdateSecretForRepo: { method: "PUT", params: { encrypted_value: { type: "string" }, key_id: { type: "string" }, name: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/secrets/:name" }, createRegistrationToken: { method: "POST", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/runners/registration-token" }, createRemoveToken: { method: "POST", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/runners/remove-token" }, deleteArtifact: { method: "DELETE", params: { artifact_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" }, deleteSecretFromRepo: { method: "DELETE", params: { name: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/secrets/:name" }, downloadArtifact: { method: "GET", params: { archive_format: { required: true, type: "string" }, artifact_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format" }, getArtifact: { method: "GET", params: { artifact_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" }, getPublicKey: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/secrets/public-key" }, getSecret: { method: "GET", params: { name: { required: true, type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/secrets/:name" }, getSelfHostedRunner: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, runner_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runners/:runner_id" }, getWorkflow: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, workflow_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/workflows/:workflow_id" }, getWorkflowJob: { method: "GET", params: { job_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/jobs/:job_id" }, getWorkflowRun: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, run_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runs/:run_id" }, listDownloadsForSelfHostedRunnerApplication: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/runners/downloads" }, listJobsForWorkflowRun: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, run_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runs/:run_id/jobs" }, listRepoWorkflowRuns: { method: "GET", params: { actor: { type: "string" }, branch: { type: "string" }, event: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, status: { enum: ["completed", "status", "conclusion"], type: "string" } }, url: "/repos/:owner/:repo/actions/runs" }, listRepoWorkflows: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/workflows" }, listSecretsForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/secrets" }, listSelfHostedRunnersForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/runners" }, listWorkflowJobLogs: { method: "GET", params: { job_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/actions/jobs/:job_id/logs" }, listWorkflowRunArtifacts: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, run_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts" }, listWorkflowRunLogs: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, run_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runs/:run_id/logs" }, listWorkflowRuns: { method: "GET", params: { actor: { type: "string" }, branch: { type: "string" }, event: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, status: { enum: ["completed", "status", "conclusion"], type: "string" }, workflow_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs" }, reRunWorkflow: { method: "POST", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, run_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runs/:run_id/rerun" }, removeSelfHostedRunner: { method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, runner_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/actions/runners/:runner_id" } }, activity: { checkStarringRepo: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/user/starred/:owner/:repo" }, deleteRepoSubscription: { method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/subscription" }, deleteThreadSubscription: { method: "DELETE", params: { thread_id: { required: true, type: "integer" } }, url: "/notifications/threads/:thread_id/subscription" }, getRepoSubscription: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/subscription" }, getThread: { method: "GET", params: { thread_id: { required: true, type: "integer" } }, url: "/notifications/threads/:thread_id" }, getThreadSubscription: { method: "GET", params: { thread_id: { required: true, type: "integer" } }, url: "/notifications/threads/:thread_id/subscription" }, listEventsForOrg: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/events/orgs/:org" }, listEventsForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/events" }, listFeeds: { method: "GET", params: {}, url: "/feeds" }, listNotifications: { method: "GET", params: { all: { type: "boolean" }, before: { type: "string" }, page: { type: "integer" }, participating: { type: "boolean" }, per_page: { type: "integer" }, since: { type: "string" } }, url: "/notifications" }, listNotificationsForRepo: { method: "GET", params: { all: { type: "boolean" }, before: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, participating: { type: "boolean" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, since: { type: "string" } }, url: "/repos/:owner/:repo/notifications" }, listPublicEvents: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/events" }, listPublicEventsForOrg: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/events" }, listPublicEventsForRepoNetwork: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/networks/:owner/:repo/events" }, listPublicEventsForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/events/public" }, listReceivedEventsForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/received_events" }, listReceivedPublicEventsForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/received_events/public" }, listRepoEvents: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/events" }, listReposStarredByAuthenticatedUser: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/user/starred" }, listReposStarredByUser: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, sort: { enum: ["created", "updated"], type: "string" }, username: { required: true, type: "string" } }, url: "/users/:username/starred" }, listReposWatchedByUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/subscriptions" }, listStargazersForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/stargazers" }, listWatchedReposForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/subscriptions" }, listWatchersForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/subscribers" }, markAsRead: { method: "PUT", params: { last_read_at: { type: "string" } }, url: "/notifications" }, markNotificationsAsReadForRepo: { method: "PUT", params: { last_read_at: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/notifications" }, markThreadAsRead: { method: "PATCH", params: { thread_id: { required: true, type: "integer" } }, url: "/notifications/threads/:thread_id" }, setRepoSubscription: { method: "PUT", params: { ignored: { type: "boolean" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, subscribed: { type: "boolean" } }, url: "/repos/:owner/:repo/subscription" }, setThreadSubscription: { method: "PUT", params: { ignored: { type: "boolean" }, thread_id: { required: true, type: "integer" } }, url: "/notifications/threads/:thread_id/subscription" }, starRepo: { method: "PUT", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/user/starred/:owner/:repo" }, unstarRepo: { method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/user/starred/:owner/:repo" } }, apps: { addRepoToInstallation: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "PUT", params: { installation_id: { required: true, type: "integer" }, repository_id: { required: true, type: "integer" } }, url: "/user/installations/:installation_id/repositories/:repository_id" }, checkAccountIsAssociatedWithAny: { method: "GET", params: { account_id: { required: true, type: "integer" } }, url: "/marketplace_listing/accounts/:account_id" }, checkAccountIsAssociatedWithAnyStubbed: { method: "GET", params: { account_id: { required: true, type: "integer" } }, url: "/marketplace_listing/stubbed/accounts/:account_id" }, checkAuthorization: { deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization", method: "GET", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/tokens/:access_token" }, checkToken: { headers: { accept: "application/vnd.github.doctor-strange-preview+json" }, method: "POST", params: { access_token: { type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/token" }, createContentAttachment: { headers: { accept: "application/vnd.github.corsair-preview+json" }, method: "POST", params: { body: { required: true, type: "string" }, content_reference_id: { required: true, type: "integer" }, title: { required: true, type: "string" } }, url: "/content_references/:content_reference_id/attachments" }, createFromManifest: { headers: { accept: "application/vnd.github.fury-preview+json" }, method: "POST", params: { code: { required: true, type: "string" } }, url: "/app-manifests/:code/conversions" }, createInstallationToken: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "POST", params: { installation_id: { required: true, type: "integer" }, permissions: { type: "object" }, repository_ids: { type: "integer[]" } }, url: "/app/installations/:installation_id/access_tokens" }, deleteAuthorization: { headers: { accept: "application/vnd.github.doctor-strange-preview+json" }, method: "DELETE", params: { access_token: { type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/grant" }, deleteInstallation: { headers: { accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json" }, method: "DELETE", params: { installation_id: { required: true, type: "integer" } }, url: "/app/installations/:installation_id" }, deleteToken: { headers: { accept: "application/vnd.github.doctor-strange-preview+json" }, method: "DELETE", params: { access_token: { type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/token" }, findOrgInstallation: { deprecated: "octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)", headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { org: { required: true, type: "string" } }, url: "/orgs/:org/installation" }, findRepoInstallation: { deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)", headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/installation" }, findUserInstallation: { deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)", headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { username: { required: true, type: "string" } }, url: "/users/:username/installation" }, getAuthenticated: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: {}, url: "/app" }, getBySlug: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { app_slug: { required: true, type: "string" } }, url: "/apps/:app_slug" }, getInstallation: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { installation_id: { required: true, type: "integer" } }, url: "/app/installations/:installation_id" }, getOrgInstallation: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { org: { required: true, type: "string" } }, url: "/orgs/:org/installation" }, getRepoInstallation: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/installation" }, getUserInstallation: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { username: { required: true, type: "string" } }, url: "/users/:username/installation" }, listAccountsUserOrOrgOnPlan: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, plan_id: { required: true, type: "integer" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/marketplace_listing/plans/:plan_id/accounts" }, listAccountsUserOrOrgOnPlanStubbed: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, plan_id: { required: true, type: "integer" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/marketplace_listing/stubbed/plans/:plan_id/accounts" }, listInstallationReposForAuthenticatedUser: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { installation_id: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/installations/:installation_id/repositories" }, listInstallations: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/app/installations" }, listInstallationsForAuthenticatedUser: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/installations" }, listMarketplacePurchasesForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/marketplace_purchases" }, listMarketplacePurchasesForAuthenticatedUserStubbed: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/marketplace_purchases/stubbed" }, listPlans: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/marketplace_listing/plans" }, listPlansStubbed: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/marketplace_listing/stubbed/plans" }, listRepos: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/installation/repositories" }, removeRepoFromInstallation: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "DELETE", params: { installation_id: { required: true, type: "integer" }, repository_id: { required: true, type: "integer" } }, url: "/user/installations/:installation_id/repositories/:repository_id" }, resetAuthorization: { deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization", method: "POST", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/tokens/:access_token" }, resetToken: { headers: { accept: "application/vnd.github.doctor-strange-preview+json" }, method: "PATCH", params: { access_token: { type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/token" }, revokeAuthorizationForApplication: { deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application", method: "DELETE", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/tokens/:access_token" }, revokeGrantForApplication: { deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application", method: "DELETE", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/grants/:access_token" }, revokeInstallationToken: { headers: { accept: "application/vnd.github.gambit-preview+json" }, method: "DELETE", params: {}, url: "/installation/token" } }, checks: { create: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "POST", params: { actions: { type: "object[]" }, "actions[].description": { required: true, type: "string" }, "actions[].identifier": { required: true, type: "string" }, "actions[].label": { required: true, type: "string" }, completed_at: { type: "string" }, conclusion: { enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], type: "string" }, details_url: { type: "string" }, external_id: { type: "string" }, head_sha: { required: true, type: "string" }, name: { required: true, type: "string" }, output: { type: "object" }, "output.annotations": { type: "object[]" }, "output.annotations[].annotation_level": { enum: ["notice", "warning", "failure"], required: true, type: "string" }, "output.annotations[].end_column": { type: "integer" }, "output.annotations[].end_line": { required: true, type: "integer" }, "output.annotations[].message": { required: true, type: "string" }, "output.annotations[].path": { required: true, type: "string" }, "output.annotations[].raw_details": { type: "string" }, "output.annotations[].start_column": { type: "integer" }, "output.annotations[].start_line": { required: true, type: "integer" }, "output.annotations[].title": { type: "string" }, "output.images": { type: "object[]" }, "output.images[].alt": { required: true, type: "string" }, "output.images[].caption": { type: "string" }, "output.images[].image_url": { required: true, type: "string" }, "output.summary": { required: true, type: "string" }, "output.text": { type: "string" }, "output.title": { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, started_at: { type: "string" }, status: { enum: ["queued", "in_progress", "completed"], type: "string" } }, url: "/repos/:owner/:repo/check-runs" }, createSuite: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "POST", params: { head_sha: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/check-suites" }, get: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "GET", params: { check_run_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/check-runs/:check_run_id" }, getSuite: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "GET", params: { check_suite_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/check-suites/:check_suite_id" }, listAnnotations: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "GET", params: { check_run_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations" }, listForRef: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "GET", params: { check_name: { type: "string" }, filter: { enum: ["latest", "all"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" }, status: { enum: ["queued", "in_progress", "completed"], type: "string" } }, url: "/repos/:owner/:repo/commits/:ref/check-runs" }, listForSuite: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "GET", params: { check_name: { type: "string" }, check_suite_id: { required: true, type: "integer" }, filter: { enum: ["latest", "all"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, status: { enum: ["queued", "in_progress", "completed"], type: "string" } }, url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs" }, listSuitesForRef: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "GET", params: { app_id: { type: "integer" }, check_name: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:ref/check-suites" }, rerequestSuite: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "POST", params: { check_suite_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest" }, setSuitesPreferences: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "PATCH", params: { auto_trigger_checks: { type: "object[]" }, "auto_trigger_checks[].app_id": { required: true, type: "integer" }, "auto_trigger_checks[].setting": { required: true, type: "boolean" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/check-suites/preferences" }, update: { headers: { accept: "application/vnd.github.antiope-preview+json" }, method: "PATCH", params: { actions: { type: "object[]" }, "actions[].description": { required: true, type: "string" }, "actions[].identifier": { required: true, type: "string" }, "actions[].label": { required: true, type: "string" }, check_run_id: { required: true, type: "integer" }, completed_at: { type: "string" }, conclusion: { enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], type: "string" }, details_url: { type: "string" }, external_id: { type: "string" }, name: { type: "string" }, output: { type: "object" }, "output.annotations": { type: "object[]" }, "output.annotations[].annotation_level": { enum: ["notice", "warning", "failure"], required: true, type: "string" }, "output.annotations[].end_column": { type: "integer" }, "output.annotations[].end_line": { required: true, type: "integer" }, "output.annotations[].message": { required: true, type: "string" }, "output.annotations[].path": { required: true, type: "string" }, "output.annotations[].raw_details": { type: "string" }, "output.annotations[].start_column": { type: "integer" }, "output.annotations[].start_line": { required: true, type: "integer" }, "output.annotations[].title": { type: "string" }, "output.images": { type: "object[]" }, "output.images[].alt": { required: true, type: "string" }, "output.images[].caption": { type: "string" }, "output.images[].image_url": { required: true, type: "string" }, "output.summary": { required: true, type: "string" }, "output.text": { type: "string" }, "output.title": { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, started_at: { type: "string" }, status: { enum: ["queued", "in_progress", "completed"], type: "string" } }, url: "/repos/:owner/:repo/check-runs/:check_run_id" } }, codesOfConduct: { getConductCode: { headers: { accept: "application/vnd.github.scarlet-witch-preview+json" }, method: "GET", params: { key: { required: true, type: "string" } }, url: "/codes_of_conduct/:key" }, getForRepo: { headers: { accept: "application/vnd.github.scarlet-witch-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/community/code_of_conduct" }, listConductCodes: { headers: { accept: "application/vnd.github.scarlet-witch-preview+json" }, method: "GET", params: {}, url: "/codes_of_conduct" } }, emojis: { get: { method: "GET", params: {}, url: "/emojis" } }, gists: { checkIsStarred: { method: "GET", params: { gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/star" }, create: { method: "POST", params: { description: { type: "string" }, files: { required: true, type: "object" }, "files.content": { type: "string" }, public: { type: "boolean" } }, url: "/gists" }, createComment: { method: "POST", params: { body: { required: true, type: "string" }, gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/comments" }, delete: { method: "DELETE", params: { gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id" }, deleteComment: { method: "DELETE", params: { comment_id: { required: true, type: "integer" }, gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/comments/:comment_id" }, fork: { method: "POST", params: { gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/forks" }, get: { method: "GET", params: { gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id" }, getComment: { method: "GET", params: { comment_id: { required: true, type: "integer" }, gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/comments/:comment_id" }, getRevision: { method: "GET", params: { gist_id: { required: true, type: "string" }, sha: { required: true, type: "string" } }, url: "/gists/:gist_id/:sha" }, list: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" } }, url: "/gists" }, listComments: { method: "GET", params: { gist_id: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/gists/:gist_id/comments" }, listCommits: { method: "GET", params: { gist_id: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/gists/:gist_id/commits" }, listForks: { method: "GET", params: { gist_id: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/gists/:gist_id/forks" }, listPublic: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" } }, url: "/gists/public" }, listPublicForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" }, username: { required: true, type: "string" } }, url: "/users/:username/gists" }, listStarred: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" } }, url: "/gists/starred" }, star: { method: "PUT", params: { gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/star" }, unstar: { method: "DELETE", params: { gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/star" }, update: { method: "PATCH", params: { description: { type: "string" }, files: { type: "object" }, "files.content": { type: "string" }, "files.filename": { type: "string" }, gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id" }, updateComment: { method: "PATCH", params: { body: { required: true, type: "string" }, comment_id: { required: true, type: "integer" }, gist_id: { required: true, type: "string" } }, url: "/gists/:gist_id/comments/:comment_id" } }, git: { createBlob: { method: "POST", params: { content: { required: true, type: "string" }, encoding: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/blobs" }, createCommit: { method: "POST", params: { author: { type: "object" }, "author.date": { type: "string" }, "author.email": { type: "string" }, "author.name": { type: "string" }, committer: { type: "object" }, "committer.date": { type: "string" }, "committer.email": { type: "string" }, "committer.name": { type: "string" }, message: { required: true, type: "string" }, owner: { required: true, type: "string" }, parents: { required: true, type: "string[]" }, repo: { required: true, type: "string" }, signature: { type: "string" }, tree: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/commits" }, createRef: { method: "POST", params: { owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/refs" }, createTag: { method: "POST", params: { message: { required: true, type: "string" }, object: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, tag: { required: true, type: "string" }, tagger: { type: "object" }, "tagger.date": { type: "string" }, "tagger.email": { type: "string" }, "tagger.name": { type: "string" }, type: { enum: ["commit", "tree", "blob"], required: true, type: "string" } }, url: "/repos/:owner/:repo/git/tags" }, createTree: { method: "POST", params: { base_tree: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, tree: { required: true, type: "object[]" }, "tree[].content": { type: "string" }, "tree[].mode": { enum: ["100644", "100755", "040000", "160000", "120000"], type: "string" }, "tree[].path": { type: "string" }, "tree[].sha": { allowNull: true, type: "string" }, "tree[].type": { enum: ["blob", "tree", "commit"], type: "string" } }, url: "/repos/:owner/:repo/git/trees" }, deleteRef: { method: "DELETE", params: { owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/refs/:ref" }, getBlob: { method: "GET", params: { file_sha: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/blobs/:file_sha" }, getCommit: { method: "GET", params: { commit_sha: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/commits/:commit_sha" }, getRef: { method: "GET", params: { owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/ref/:ref" }, getTag: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, tag_sha: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/tags/:tag_sha" }, getTree: { method: "GET", params: { owner: { required: true, type: "string" }, recursive: { enum: ["1"], type: "integer" }, repo: { required: true, type: "string" }, tree_sha: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/trees/:tree_sha" }, listMatchingRefs: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/matching-refs/:ref" }, listRefs: { method: "GET", params: { namespace: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/refs/:namespace" }, updateRef: { method: "PATCH", params: { force: { type: "boolean" }, owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { required: true, type: "string" } }, url: "/repos/:owner/:repo/git/refs/:ref" } }, gitignore: { getTemplate: { method: "GET", params: { name: { required: true, type: "string" } }, url: "/gitignore/templates/:name" }, listTemplates: { method: "GET", params: {}, url: "/gitignore/templates" } }, interactions: { addOrUpdateRestrictionsForOrg: { headers: { accept: "application/vnd.github.sombra-preview+json" }, method: "PUT", params: { limit: { enum: ["existing_users", "contributors_only", "collaborators_only"], required: true, type: "string" }, org: { required: true, type: "string" } }, url: "/orgs/:org/interaction-limits" }, addOrUpdateRestrictionsForRepo: { headers: { accept: "application/vnd.github.sombra-preview+json" }, method: "PUT", params: { limit: { enum: ["existing_users", "contributors_only", "collaborators_only"], required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/interaction-limits" }, getRestrictionsForOrg: { headers: { accept: "application/vnd.github.sombra-preview+json" }, method: "GET", params: { org: { required: true, type: "string" } }, url: "/orgs/:org/interaction-limits" }, getRestrictionsForRepo: { headers: { accept: "application/vnd.github.sombra-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/interaction-limits" }, removeRestrictionsForOrg: { headers: { accept: "application/vnd.github.sombra-preview+json" }, method: "DELETE", params: { org: { required: true, type: "string" } }, url: "/orgs/:org/interaction-limits" }, removeRestrictionsForRepo: { headers: { accept: "application/vnd.github.sombra-preview+json" }, method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/interaction-limits" } }, issues: { addAssignees: { method: "POST", params: { assignees: { type: "string[]" }, issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/assignees" }, addLabels: { method: "POST", params: { issue_number: { required: true, type: "integer" }, labels: { required: true, type: "string[]" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/labels" }, checkAssignee: { method: "GET", params: { assignee: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/assignees/:assignee" }, create: { method: "POST", params: { assignee: { type: "string" }, assignees: { type: "string[]" }, body: { type: "string" }, labels: { type: "string[]" }, milestone: { type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, title: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues" }, createComment: { method: "POST", params: { body: { required: true, type: "string" }, issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/comments" }, createLabel: { method: "POST", params: { color: { required: true, type: "string" }, description: { type: "string" }, name: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/labels" }, createMilestone: { method: "POST", params: { description: { type: "string" }, due_on: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, state: { enum: ["open", "closed"], type: "string" }, title: { required: true, type: "string" } }, url: "/repos/:owner/:repo/milestones" }, deleteComment: { method: "DELETE", params: { comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/comments/:comment_id" }, deleteLabel: { method: "DELETE", params: { name: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/labels/:name" }, deleteMilestone: { method: "DELETE", params: { milestone_number: { required: true, type: "integer" }, number: { alias: "milestone_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/milestones/:milestone_number" }, get: { method: "GET", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number" }, getComment: { method: "GET", params: { comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/comments/:comment_id" }, getEvent: { method: "GET", params: { event_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/events/:event_id" }, getLabel: { method: "GET", params: { name: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/labels/:name" }, getMilestone: { method: "GET", params: { milestone_number: { required: true, type: "integer" }, number: { alias: "milestone_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/milestones/:milestone_number" }, list: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, filter: { enum: ["assigned", "created", "mentioned", "subscribed", "all"], type: "string" }, labels: { type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" }, sort: { enum: ["created", "updated", "comments"], type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/issues" }, listAssignees: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/assignees" }, listComments: { method: "GET", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, since: { type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/comments" }, listCommentsForRepo: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, since: { type: "string" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/repos/:owner/:repo/issues/comments" }, listEvents: { method: "GET", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/events" }, listEventsForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/events" }, listEventsForTimeline: { headers: { accept: "application/vnd.github.mockingbird-preview+json" }, method: "GET", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/timeline" }, listForAuthenticatedUser: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, filter: { enum: ["assigned", "created", "mentioned", "subscribed", "all"], type: "string" }, labels: { type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" }, sort: { enum: ["created", "updated", "comments"], type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/user/issues" }, listForOrg: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, filter: { enum: ["assigned", "created", "mentioned", "subscribed", "all"], type: "string" }, labels: { type: "string" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" }, sort: { enum: ["created", "updated", "comments"], type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/orgs/:org/issues" }, listForRepo: { method: "GET", params: { assignee: { type: "string" }, creator: { type: "string" }, direction: { enum: ["asc", "desc"], type: "string" }, labels: { type: "string" }, mentioned: { type: "string" }, milestone: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, since: { type: "string" }, sort: { enum: ["created", "updated", "comments"], type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/repos/:owner/:repo/issues" }, listLabelsForMilestone: { method: "GET", params: { milestone_number: { required: true, type: "integer" }, number: { alias: "milestone_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/milestones/:milestone_number/labels" }, listLabelsForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/labels" }, listLabelsOnIssue: { method: "GET", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/labels" }, listMilestonesForRepo: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, sort: { enum: ["due_on", "completeness"], type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/repos/:owner/:repo/milestones" }, lock: { method: "PUT", params: { issue_number: { required: true, type: "integer" }, lock_reason: { enum: ["off-topic", "too heated", "resolved", "spam"], type: "string" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/lock" }, removeAssignees: { method: "DELETE", params: { assignees: { type: "string[]" }, issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/assignees" }, removeLabel: { method: "DELETE", params: { issue_number: { required: true, type: "integer" }, name: { required: true, type: "string" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/labels/:name" }, removeLabels: { method: "DELETE", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/labels" }, replaceLabels: { method: "PUT", params: { issue_number: { required: true, type: "integer" }, labels: { type: "string[]" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/labels" }, unlock: { method: "DELETE", params: { issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/lock" }, update: { method: "PATCH", params: { assignee: { type: "string" }, assignees: { type: "string[]" }, body: { type: "string" }, issue_number: { required: true, type: "integer" }, labels: { type: "string[]" }, milestone: { allowNull: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, state: { enum: ["open", "closed"], type: "string" }, title: { type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number" }, updateComment: { method: "PATCH", params: { body: { required: true, type: "string" }, comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/comments/:comment_id" }, updateLabel: { method: "PATCH", params: { color: { type: "string" }, current_name: { required: true, type: "string" }, description: { type: "string" }, name: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/labels/:current_name" }, updateMilestone: { method: "PATCH", params: { description: { type: "string" }, due_on: { type: "string" }, milestone_number: { required: true, type: "integer" }, number: { alias: "milestone_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, state: { enum: ["open", "closed"], type: "string" }, title: { type: "string" } }, url: "/repos/:owner/:repo/milestones/:milestone_number" } }, licenses: { get: { method: "GET", params: { license: { required: true, type: "string" } }, url: "/licenses/:license" }, getForRepo: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/license" }, list: { deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)", method: "GET", params: {}, url: "/licenses" }, listCommonlyUsed: { method: "GET", params: {}, url: "/licenses" } }, markdown: { render: { method: "POST", params: { context: { type: "string" }, mode: { enum: ["markdown", "gfm"], type: "string" }, text: { required: true, type: "string" } }, url: "/markdown" }, renderRaw: { headers: { "content-type": "text/plain; charset=utf-8" }, method: "POST", params: { data: { mapTo: "data", required: true, type: "string" } }, url: "/markdown/raw" } }, meta: { get: { method: "GET", params: {}, url: "/meta" } }, migrations: { cancelImport: { method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/import" }, deleteArchiveForAuthenticatedUser: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "DELETE", params: { migration_id: { required: true, type: "integer" } }, url: "/user/migrations/:migration_id/archive" }, deleteArchiveForOrg: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "DELETE", params: { migration_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/migrations/:migration_id/archive" }, downloadArchiveForOrg: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/migrations/:migration_id/archive" }, getArchiveForAuthenticatedUser: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" } }, url: "/user/migrations/:migration_id/archive" }, getArchiveForOrg: { deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)", headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/migrations/:migration_id/archive" }, getCommitAuthors: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, since: { type: "string" } }, url: "/repos/:owner/:repo/import/authors" }, getImportProgress: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/import" }, getLargeFiles: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/import/large_files" }, getStatusForAuthenticatedUser: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" } }, url: "/user/migrations/:migration_id" }, getStatusForOrg: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/migrations/:migration_id" }, listForAuthenticatedUser: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/migrations" }, listForOrg: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/migrations" }, listReposForOrg: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/migrations/:migration_id/repositories" }, listReposForUser: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "GET", params: { migration_id: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/:migration_id/repositories" }, mapCommitAuthor: { method: "PATCH", params: { author_id: { required: true, type: "integer" }, email: { type: "string" }, name: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/import/authors/:author_id" }, setLfsPreference: { method: "PATCH", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, use_lfs: { enum: ["opt_in", "opt_out"], required: true, type: "string" } }, url: "/repos/:owner/:repo/import/lfs" }, startForAuthenticatedUser: { method: "POST", params: { exclude_attachments: { type: "boolean" }, lock_repositories: { type: "boolean" }, repositories: { required: true, type: "string[]" } }, url: "/user/migrations" }, startForOrg: { method: "POST", params: { exclude_attachments: { type: "boolean" }, lock_repositories: { type: "boolean" }, org: { required: true, type: "string" }, repositories: { required: true, type: "string[]" } }, url: "/orgs/:org/migrations" }, startImport: { method: "PUT", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, tfvc_project: { type: "string" }, vcs: { enum: ["subversion", "git", "mercurial", "tfvc"], type: "string" }, vcs_password: { type: "string" }, vcs_url: { required: true, type: "string" }, vcs_username: { type: "string" } }, url: "/repos/:owner/:repo/import" }, unlockRepoForAuthenticatedUser: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "DELETE", params: { migration_id: { required: true, type: "integer" }, repo_name: { required: true, type: "string" } }, url: "/user/migrations/:migration_id/repos/:repo_name/lock" }, unlockRepoForOrg: { headers: { accept: "application/vnd.github.wyandotte-preview+json" }, method: "DELETE", params: { migration_id: { required: true, type: "integer" }, org: { required: true, type: "string" }, repo_name: { required: true, type: "string" } }, url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock" }, updateImport: { method: "PATCH", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, vcs_password: { type: "string" }, vcs_username: { type: "string" } }, url: "/repos/:owner/:repo/import" } }, oauthAuthorizations: { checkAuthorization: { deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)", method: "GET", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/tokens/:access_token" }, createAuthorization: { deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization", method: "POST", params: { client_id: { type: "string" }, client_secret: { type: "string" }, fingerprint: { type: "string" }, note: { required: true, type: "string" }, note_url: { type: "string" }, scopes: { type: "string[]" } }, url: "/authorizations" }, deleteAuthorization: { deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization", method: "DELETE", params: { authorization_id: { required: true, type: "integer" } }, url: "/authorizations/:authorization_id" }, deleteGrant: { deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant", method: "DELETE", params: { grant_id: { required: true, type: "integer" } }, url: "/applications/grants/:grant_id" }, getAuthorization: { deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization", method: "GET", params: { authorization_id: { required: true, type: "integer" } }, url: "/authorizations/:authorization_id" }, getGrant: { deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant", method: "GET", params: { grant_id: { required: true, type: "integer" } }, url: "/applications/grants/:grant_id" }, getOrCreateAuthorizationForApp: { deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app", method: "PUT", params: { client_id: { required: true, type: "string" }, client_secret: { required: true, type: "string" }, fingerprint: { type: "string" }, note: { type: "string" }, note_url: { type: "string" }, scopes: { type: "string[]" } }, url: "/authorizations/clients/:client_id" }, getOrCreateAuthorizationForAppAndFingerprint: { deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint", method: "PUT", params: { client_id: { required: true, type: "string" }, client_secret: { required: true, type: "string" }, fingerprint: { required: true, type: "string" }, note: { type: "string" }, note_url: { type: "string" }, scopes: { type: "string[]" } }, url: "/authorizations/clients/:client_id/:fingerprint" }, getOrCreateAuthorizationForAppFingerprint: { deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)", method: "PUT", params: { client_id: { required: true, type: "string" }, client_secret: { required: true, type: "string" }, fingerprint: { required: true, type: "string" }, note: { type: "string" }, note_url: { type: "string" }, scopes: { type: "string[]" } }, url: "/authorizations/clients/:client_id/:fingerprint" }, listAuthorizations: { deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/authorizations" }, listGrants: { deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/applications/grants" }, resetAuthorization: { deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)", method: "POST", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/tokens/:access_token" }, revokeAuthorizationForApplication: { deprecated: "octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)", method: "DELETE", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/tokens/:access_token" }, revokeGrantForApplication: { deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)", method: "DELETE", params: { access_token: { required: true, type: "string" }, client_id: { required: true, type: "string" } }, url: "/applications/:client_id/grants/:access_token" }, updateAuthorization: { deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization", method: "PATCH", params: { add_scopes: { type: "string[]" }, authorization_id: { required: true, type: "integer" }, fingerprint: { type: "string" }, note: { type: "string" }, note_url: { type: "string" }, remove_scopes: { type: "string[]" }, scopes: { type: "string[]" } }, url: "/authorizations/:authorization_id" } }, orgs: { addOrUpdateMembership: { method: "PUT", params: { org: { required: true, type: "string" }, role: { enum: ["admin", "member"], type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/memberships/:username" }, blockUser: { method: "PUT", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/blocks/:username" }, checkBlockedUser: { method: "GET", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/blocks/:username" }, checkMembership: { method: "GET", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/members/:username" }, checkPublicMembership: { method: "GET", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/public_members/:username" }, concealMembership: { method: "DELETE", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/public_members/:username" }, convertMemberToOutsideCollaborator: { method: "PUT", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/outside_collaborators/:username" }, createHook: { method: "POST", params: { active: { type: "boolean" }, config: { required: true, type: "object" }, "config.content_type": { type: "string" }, "config.insecure_ssl": { type: "string" }, "config.secret": { type: "string" }, "config.url": { required: true, type: "string" }, events: { type: "string[]" }, name: { required: true, type: "string" }, org: { required: true, type: "string" } }, url: "/orgs/:org/hooks" }, createInvitation: { method: "POST", params: { email: { type: "string" }, invitee_id: { type: "integer" }, org: { required: true, type: "string" }, role: { enum: ["admin", "direct_member", "billing_manager"], type: "string" }, team_ids: { type: "integer[]" } }, url: "/orgs/:org/invitations" }, deleteHook: { method: "DELETE", params: { hook_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/hooks/:hook_id" }, get: { method: "GET", params: { org: { required: true, type: "string" } }, url: "/orgs/:org" }, getHook: { method: "GET", params: { hook_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/hooks/:hook_id" }, getMembership: { method: "GET", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/memberships/:username" }, getMembershipForAuthenticatedUser: { method: "GET", params: { org: { required: true, type: "string" } }, url: "/user/memberships/orgs/:org" }, list: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "integer" } }, url: "/organizations" }, listBlockedUsers: { method: "GET", params: { org: { required: true, type: "string" } }, url: "/orgs/:org/blocks" }, listForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/orgs" }, listForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/orgs" }, listHooks: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/hooks" }, listInstallations: { headers: { accept: "application/vnd.github.machine-man-preview+json" }, method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/installations" }, listInvitationTeams: { method: "GET", params: { invitation_id: { required: true, type: "integer" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/invitations/:invitation_id/teams" }, listMembers: { method: "GET", params: { filter: { enum: ["2fa_disabled", "all"], type: "string" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, role: { enum: ["all", "admin", "member"], type: "string" } }, url: "/orgs/:org/members" }, listMemberships: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, state: { enum: ["active", "pending"], type: "string" } }, url: "/user/memberships/orgs" }, listOutsideCollaborators: { method: "GET", params: { filter: { enum: ["2fa_disabled", "all"], type: "string" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/outside_collaborators" }, listPendingInvitations: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/invitations" }, listPublicMembers: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/public_members" }, pingHook: { method: "POST", params: { hook_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/hooks/:hook_id/pings" }, publicizeMembership: { method: "PUT", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/public_members/:username" }, removeMember: { method: "DELETE", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/members/:username" }, removeMembership: { method: "DELETE", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/memberships/:username" }, removeOutsideCollaborator: { method: "DELETE", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/outside_collaborators/:username" }, unblockUser: { method: "DELETE", params: { org: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/blocks/:username" }, update: { method: "PATCH", params: { billing_email: { type: "string" }, company: { type: "string" }, default_repository_permission: { enum: ["read", "write", "admin", "none"], type: "string" }, description: { type: "string" }, email: { type: "string" }, has_organization_projects: { type: "boolean" }, has_repository_projects: { type: "boolean" }, location: { type: "string" }, members_allowed_repository_creation_type: { enum: ["all", "private", "none"], type: "string" }, members_can_create_internal_repositories: { type: "boolean" }, members_can_create_private_repositories: { type: "boolean" }, members_can_create_public_repositories: { type: "boolean" }, members_can_create_repositories: { type: "boolean" }, name: { type: "string" }, org: { required: true, type: "string" } }, url: "/orgs/:org" }, updateHook: { method: "PATCH", params: { active: { type: "boolean" }, config: { type: "object" }, "config.content_type": { type: "string" }, "config.insecure_ssl": { type: "string" }, "config.secret": { type: "string" }, "config.url": { required: true, type: "string" }, events: { type: "string[]" }, hook_id: { required: true, type: "integer" }, org: { required: true, type: "string" } }, url: "/orgs/:org/hooks/:hook_id" }, updateMembership: { method: "PATCH", params: { org: { required: true, type: "string" }, state: { enum: ["active"], required: true, type: "string" } }, url: "/user/memberships/orgs/:org" } }, projects: { addCollaborator: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PUT", params: { permission: { enum: ["read", "write", "admin"], type: "string" }, project_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/projects/:project_id/collaborators/:username" }, createCard: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { column_id: { required: true, type: "integer" }, content_id: { type: "integer" }, content_type: { type: "string" }, note: { type: "string" } }, url: "/projects/columns/:column_id/cards" }, createColumn: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { name: { required: true, type: "string" }, project_id: { required: true, type: "integer" } }, url: "/projects/:project_id/columns" }, createForAuthenticatedUser: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { body: { type: "string" }, name: { required: true, type: "string" } }, url: "/user/projects" }, createForOrg: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { body: { type: "string" }, name: { required: true, type: "string" }, org: { required: true, type: "string" } }, url: "/orgs/:org/projects" }, createForRepo: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { body: { type: "string" }, name: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/projects" }, delete: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "DELETE", params: { project_id: { required: true, type: "integer" } }, url: "/projects/:project_id" }, deleteCard: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "DELETE", params: { card_id: { required: true, type: "integer" } }, url: "/projects/columns/cards/:card_id" }, deleteColumn: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "DELETE", params: { column_id: { required: true, type: "integer" } }, url: "/projects/columns/:column_id" }, get: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { project_id: { required: true, type: "integer" } }, url: "/projects/:project_id" }, getCard: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { card_id: { required: true, type: "integer" } }, url: "/projects/columns/cards/:card_id" }, getColumn: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { column_id: { required: true, type: "integer" } }, url: "/projects/columns/:column_id" }, listCards: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { archived_state: { enum: ["all", "archived", "not_archived"], type: "string" }, column_id: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/projects/columns/:column_id/cards" }, listCollaborators: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { affiliation: { enum: ["outside", "direct", "all"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, project_id: { required: true, type: "integer" } }, url: "/projects/:project_id/collaborators" }, listColumns: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, project_id: { required: true, type: "integer" } }, url: "/projects/:project_id/columns" }, listForOrg: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/orgs/:org/projects" }, listForRepo: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/repos/:owner/:repo/projects" }, listForUser: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, state: { enum: ["open", "closed", "all"], type: "string" }, username: { required: true, type: "string" } }, url: "/users/:username/projects" }, moveCard: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { card_id: { required: true, type: "integer" }, column_id: { type: "integer" }, position: { required: true, type: "string", validation: "^(top|bottom|after:\\d+)$" } }, url: "/projects/columns/cards/:card_id/moves" }, moveColumn: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "POST", params: { column_id: { required: true, type: "integer" }, position: { required: true, type: "string", validation: "^(first|last|after:\\d+)$" } }, url: "/projects/columns/:column_id/moves" }, removeCollaborator: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "DELETE", params: { project_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/projects/:project_id/collaborators/:username" }, reviewUserPermissionLevel: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { project_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/projects/:project_id/collaborators/:username/permission" }, update: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PATCH", params: { body: { type: "string" }, name: { type: "string" }, organization_permission: { type: "string" }, private: { type: "boolean" }, project_id: { required: true, type: "integer" }, state: { enum: ["open", "closed"], type: "string" } }, url: "/projects/:project_id" }, updateCard: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PATCH", params: { archived: { type: "boolean" }, card_id: { required: true, type: "integer" }, note: { type: "string" } }, url: "/projects/columns/cards/:card_id" }, updateColumn: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PATCH", params: { column_id: { required: true, type: "integer" }, name: { required: true, type: "string" } }, url: "/projects/columns/:column_id" } }, pulls: { checkIfMerged: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/merge" }, create: { method: "POST", params: { base: { required: true, type: "string" }, body: { type: "string" }, draft: { type: "boolean" }, head: { required: true, type: "string" }, maintainer_can_modify: { type: "boolean" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, title: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls" }, createComment: { method: "POST", params: { body: { required: true, type: "string" }, commit_id: { required: true, type: "string" }, in_reply_to: { deprecated: true, description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.", type: "integer" }, line: { type: "integer" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, path: { required: true, type: "string" }, position: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, side: { enum: ["LEFT", "RIGHT"], type: "string" }, start_line: { type: "integer" }, start_side: { enum: ["LEFT", "RIGHT", "side"], type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/comments" }, createCommentReply: { deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)", method: "POST", params: { body: { required: true, type: "string" }, commit_id: { required: true, type: "string" }, in_reply_to: { deprecated: true, description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.", type: "integer" }, line: { type: "integer" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, path: { required: true, type: "string" }, position: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, side: { enum: ["LEFT", "RIGHT"], type: "string" }, start_line: { type: "integer" }, start_side: { enum: ["LEFT", "RIGHT", "side"], type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/comments" }, createFromIssue: { deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request", method: "POST", params: { base: { required: true, type: "string" }, draft: { type: "boolean" }, head: { required: true, type: "string" }, issue: { required: true, type: "integer" }, maintainer_can_modify: { type: "boolean" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls" }, createReview: { method: "POST", params: { body: { type: "string" }, comments: { type: "object[]" }, "comments[].body": { required: true, type: "string" }, "comments[].path": { required: true, type: "string" }, "comments[].position": { required: true, type: "integer" }, commit_id: { type: "string" }, event: { enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], type: "string" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews" }, createReviewCommentReply: { method: "POST", params: { body: { required: true, type: "string" }, comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies" }, createReviewRequest: { method: "POST", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, reviewers: { type: "string[]" }, team_reviewers: { type: "string[]" } }, url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" }, deleteComment: { method: "DELETE", params: { comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/comments/:comment_id" }, deletePendingReview: { method: "DELETE", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, review_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" }, deleteReviewRequest: { method: "DELETE", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, reviewers: { type: "string[]" }, team_reviewers: { type: "string[]" } }, url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" }, dismissReview: { method: "PUT", params: { message: { required: true, type: "string" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, review_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals" }, get: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number" }, getComment: { method: "GET", params: { comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/comments/:comment_id" }, getCommentsForReview: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, review_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments" }, getReview: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, review_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" }, list: { method: "GET", params: { base: { type: "string" }, direction: { enum: ["asc", "desc"], type: "string" }, head: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, sort: { enum: ["created", "updated", "popularity", "long-running"], type: "string" }, state: { enum: ["open", "closed", "all"], type: "string" } }, url: "/repos/:owner/:repo/pulls" }, listComments: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, since: { type: "string" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/comments" }, listCommentsForRepo: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, since: { type: "string" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/repos/:owner/:repo/pulls/comments" }, listCommits: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/commits" }, listFiles: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/files" }, listReviewRequests: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" }, listReviews: { method: "GET", params: { number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews" }, merge: { method: "PUT", params: { commit_message: { type: "string" }, commit_title: { type: "string" }, merge_method: { enum: ["merge", "squash", "rebase"], type: "string" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, sha: { type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/merge" }, submitReview: { method: "POST", params: { body: { type: "string" }, event: { enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], required: true, type: "string" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, review_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events" }, update: { method: "PATCH", params: { base: { type: "string" }, body: { type: "string" }, maintainer_can_modify: { type: "boolean" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, state: { enum: ["open", "closed"], type: "string" }, title: { type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number" }, updateBranch: { headers: { accept: "application/vnd.github.lydian-preview+json" }, method: "PUT", params: { expected_head_sha: { type: "string" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/:pull_number/update-branch" }, updateComment: { method: "PATCH", params: { body: { required: true, type: "string" }, comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/comments/:comment_id" }, updateReview: { method: "PUT", params: { body: { required: true, type: "string" }, number: { alias: "pull_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, pull_number: { required: true, type: "integer" }, repo: { required: true, type: "string" }, review_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" } }, rateLimit: { get: { method: "GET", params: {}, url: "/rate_limit" } }, reactions: { createForCommitComment: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { comment_id: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/comments/:comment_id/reactions" }, createForIssue: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/reactions" }, createForIssueComment: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { comment_id: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" }, createForPullRequestReviewComment: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { comment_id: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" }, createForTeamDiscussion: { deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/reactions" }, createForTeamDiscussionComment: { deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { comment_number: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" }, createForTeamDiscussionCommentInOrg: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { comment_number: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" }, createForTeamDiscussionCommentLegacy: { deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { comment_number: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" }, createForTeamDiscussionInOrg: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" }, createForTeamDiscussionLegacy: { deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "POST", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/reactions" }, delete: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "DELETE", params: { reaction_id: { required: true, type: "integer" } }, url: "/reactions/:reaction_id" }, listForCommitComment: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { comment_id: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/comments/:comment_id/reactions" }, listForIssue: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, issue_number: { required: true, type: "integer" }, number: { alias: "issue_number", deprecated: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/:issue_number/reactions" }, listForIssueComment: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { comment_id: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" }, listForPullRequestReviewComment: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { comment_id: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" }, listForTeamDiscussion: { deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, discussion_number: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/reactions" }, listForTeamDiscussionComment: { deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { comment_number: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, discussion_number: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" }, listForTeamDiscussionCommentInOrg: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { comment_number: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" }, listForTeamDiscussionCommentLegacy: { deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { comment_number: { required: true, type: "integer" }, content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, discussion_number: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" }, listForTeamDiscussionInOrg: { headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" }, listForTeamDiscussionLegacy: { deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy", headers: { accept: "application/vnd.github.squirrel-girl-preview+json" }, method: "GET", params: { content: { enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], type: "string" }, discussion_number: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/reactions" } }, repos: { acceptInvitation: { method: "PATCH", params: { invitation_id: { required: true, type: "integer" } }, url: "/user/repository_invitations/:invitation_id" }, addCollaborator: { method: "PUT", params: { owner: { required: true, type: "string" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, repo: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/repos/:owner/:repo/collaborators/:username" }, addDeployKey: { method: "POST", params: { key: { required: true, type: "string" }, owner: { required: true, type: "string" }, read_only: { type: "boolean" }, repo: { required: true, type: "string" }, title: { type: "string" } }, url: "/repos/:owner/:repo/keys" }, addProtectedBranchAdminEnforcement: { method: "POST", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" }, addProtectedBranchAppRestrictions: { method: "POST", params: { apps: { mapTo: "data", required: true, type: "string[]" }, branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" }, addProtectedBranchRequiredSignatures: { headers: { accept: "application/vnd.github.zzzax-preview+json" }, method: "POST", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" }, addProtectedBranchRequiredStatusChecksContexts: { method: "POST", params: { branch: { required: true, type: "string" }, contexts: { mapTo: "data", required: true, type: "string[]" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" }, addProtectedBranchTeamRestrictions: { method: "POST", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, teams: { mapTo: "data", required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" }, addProtectedBranchUserRestrictions: { method: "POST", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, users: { mapTo: "data", required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" }, checkCollaborator: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/repos/:owner/:repo/collaborators/:username" }, checkVulnerabilityAlerts: { headers: { accept: "application/vnd.github.dorian-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/vulnerability-alerts" }, compareCommits: { method: "GET", params: { base: { required: true, type: "string" }, head: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/compare/:base...:head" }, createCommitComment: { method: "POST", params: { body: { required: true, type: "string" }, commit_sha: { required: true, type: "string" }, line: { type: "integer" }, owner: { required: true, type: "string" }, path: { type: "string" }, position: { type: "integer" }, repo: { required: true, type: "string" }, sha: { alias: "commit_sha", deprecated: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:commit_sha/comments" }, createDeployment: { method: "POST", params: { auto_merge: { type: "boolean" }, description: { type: "string" }, environment: { type: "string" }, owner: { required: true, type: "string" }, payload: { type: "string" }, production_environment: { type: "boolean" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" }, required_contexts: { type: "string[]" }, task: { type: "string" }, transient_environment: { type: "boolean" } }, url: "/repos/:owner/:repo/deployments" }, createDeploymentStatus: { method: "POST", params: { auto_inactive: { type: "boolean" }, deployment_id: { required: true, type: "integer" }, description: { type: "string" }, environment: { enum: ["production", "staging", "qa"], type: "string" }, environment_url: { type: "string" }, log_url: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, state: { enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"], required: true, type: "string" }, target_url: { type: "string" } }, url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" }, createDispatchEvent: { method: "POST", params: { client_payload: { type: "object" }, event_type: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/dispatches" }, createFile: { deprecated: "octokit.repos.createFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", method: "PUT", params: { author: { type: "object" }, "author.email": { required: true, type: "string" }, "author.name": { required: true, type: "string" }, branch: { type: "string" }, committer: { type: "object" }, "committer.email": { required: true, type: "string" }, "committer.name": { required: true, type: "string" }, content: { required: true, type: "string" }, message: { required: true, type: "string" }, owner: { required: true, type: "string" }, path: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { type: "string" } }, url: "/repos/:owner/:repo/contents/:path" }, createForAuthenticatedUser: { method: "POST", params: { allow_merge_commit: { type: "boolean" }, allow_rebase_merge: { type: "boolean" }, allow_squash_merge: { type: "boolean" }, auto_init: { type: "boolean" }, delete_branch_on_merge: { type: "boolean" }, description: { type: "string" }, gitignore_template: { type: "string" }, has_issues: { type: "boolean" }, has_projects: { type: "boolean" }, has_wiki: { type: "boolean" }, homepage: { type: "string" }, is_template: { type: "boolean" }, license_template: { type: "string" }, name: { required: true, type: "string" }, private: { type: "boolean" }, team_id: { type: "integer" }, visibility: { enum: ["public", "private", "visibility", "internal"], type: "string" } }, url: "/user/repos" }, createFork: { method: "POST", params: { organization: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/forks" }, createHook: { method: "POST", params: { active: { type: "boolean" }, config: { required: true, type: "object" }, "config.content_type": { type: "string" }, "config.insecure_ssl": { type: "string" }, "config.secret": { type: "string" }, "config.url": { required: true, type: "string" }, events: { type: "string[]" }, name: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks" }, createInOrg: { method: "POST", params: { allow_merge_commit: { type: "boolean" }, allow_rebase_merge: { type: "boolean" }, allow_squash_merge: { type: "boolean" }, auto_init: { type: "boolean" }, delete_branch_on_merge: { type: "boolean" }, description: { type: "string" }, gitignore_template: { type: "string" }, has_issues: { type: "boolean" }, has_projects: { type: "boolean" }, has_wiki: { type: "boolean" }, homepage: { type: "string" }, is_template: { type: "boolean" }, license_template: { type: "string" }, name: { required: true, type: "string" }, org: { required: true, type: "string" }, private: { type: "boolean" }, team_id: { type: "integer" }, visibility: { enum: ["public", "private", "visibility", "internal"], type: "string" } }, url: "/orgs/:org/repos" }, createOrUpdateFile: { method: "PUT", params: { author: { type: "object" }, "author.email": { required: true, type: "string" }, "author.name": { required: true, type: "string" }, branch: { type: "string" }, committer: { type: "object" }, "committer.email": { required: true, type: "string" }, "committer.name": { required: true, type: "string" }, content: { required: true, type: "string" }, message: { required: true, type: "string" }, owner: { required: true, type: "string" }, path: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { type: "string" } }, url: "/repos/:owner/:repo/contents/:path" }, createRelease: { method: "POST", params: { body: { type: "string" }, draft: { type: "boolean" }, name: { type: "string" }, owner: { required: true, type: "string" }, prerelease: { type: "boolean" }, repo: { required: true, type: "string" }, tag_name: { required: true, type: "string" }, target_commitish: { type: "string" } }, url: "/repos/:owner/:repo/releases" }, createStatus: { method: "POST", params: { context: { type: "string" }, description: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { required: true, type: "string" }, state: { enum: ["error", "failure", "pending", "success"], required: true, type: "string" }, target_url: { type: "string" } }, url: "/repos/:owner/:repo/statuses/:sha" }, createUsingTemplate: { headers: { accept: "application/vnd.github.baptiste-preview+json" }, method: "POST", params: { description: { type: "string" }, name: { required: true, type: "string" }, owner: { type: "string" }, private: { type: "boolean" }, template_owner: { required: true, type: "string" }, template_repo: { required: true, type: "string" } }, url: "/repos/:template_owner/:template_repo/generate" }, declineInvitation: { method: "DELETE", params: { invitation_id: { required: true, type: "integer" } }, url: "/user/repository_invitations/:invitation_id" }, delete: { method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo" }, deleteCommitComment: { method: "DELETE", params: { comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/comments/:comment_id" }, deleteDownload: { method: "DELETE", params: { download_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/downloads/:download_id" }, deleteFile: { method: "DELETE", params: { author: { type: "object" }, "author.email": { type: "string" }, "author.name": { type: "string" }, branch: { type: "string" }, committer: { type: "object" }, "committer.email": { type: "string" }, "committer.name": { type: "string" }, message: { required: true, type: "string" }, owner: { required: true, type: "string" }, path: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { required: true, type: "string" } }, url: "/repos/:owner/:repo/contents/:path" }, deleteHook: { method: "DELETE", params: { hook_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks/:hook_id" }, deleteInvitation: { method: "DELETE", params: { invitation_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/invitations/:invitation_id" }, deleteRelease: { method: "DELETE", params: { owner: { required: true, type: "string" }, release_id: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/:release_id" }, deleteReleaseAsset: { method: "DELETE", params: { asset_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/assets/:asset_id" }, disableAutomatedSecurityFixes: { headers: { accept: "application/vnd.github.london-preview+json" }, method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/automated-security-fixes" }, disablePagesSite: { headers: { accept: "application/vnd.github.switcheroo-preview+json" }, method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pages" }, disableVulnerabilityAlerts: { headers: { accept: "application/vnd.github.dorian-preview+json" }, method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/vulnerability-alerts" }, enableAutomatedSecurityFixes: { headers: { accept: "application/vnd.github.london-preview+json" }, method: "PUT", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/automated-security-fixes" }, enablePagesSite: { headers: { accept: "application/vnd.github.switcheroo-preview+json" }, method: "POST", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, source: { type: "object" }, "source.branch": { enum: ["master", "gh-pages"], type: "string" }, "source.path": { type: "string" } }, url: "/repos/:owner/:repo/pages" }, enableVulnerabilityAlerts: { headers: { accept: "application/vnd.github.dorian-preview+json" }, method: "PUT", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/vulnerability-alerts" }, get: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo" }, getAppsWithAccessToProtectedBranch: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" }, getArchiveLink: { method: "GET", params: { archive_format: { required: true, type: "string" }, owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/:archive_format/:ref" }, getBranch: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch" }, getBranchProtection: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection" }, getClones: { method: "GET", params: { owner: { required: true, type: "string" }, per: { enum: ["day", "week"], type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/traffic/clones" }, getCodeFrequencyStats: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/stats/code_frequency" }, getCollaboratorPermissionLevel: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/repos/:owner/:repo/collaborators/:username/permission" }, getCombinedStatusForRef: { method: "GET", params: { owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:ref/status" }, getCommit: { method: "GET", params: { commit_sha: { alias: "ref", deprecated: true, type: "string" }, owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { alias: "ref", deprecated: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:ref" }, getCommitActivityStats: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/stats/commit_activity" }, getCommitComment: { method: "GET", params: { comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/comments/:comment_id" }, getCommitRefSha: { deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit", headers: { accept: "application/vnd.github.v3.sha" }, method: "GET", params: { owner: { required: true, type: "string" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:ref" }, getContents: { method: "GET", params: { owner: { required: true, type: "string" }, path: { required: true, type: "string" }, ref: { type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/contents/:path" }, getContributorsStats: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/stats/contributors" }, getDeployKey: { method: "GET", params: { key_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/keys/:key_id" }, getDeployment: { method: "GET", params: { deployment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/deployments/:deployment_id" }, getDeploymentStatus: { method: "GET", params: { deployment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, status_id: { required: true, type: "integer" } }, url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id" }, getDownload: { method: "GET", params: { download_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/downloads/:download_id" }, getHook: { method: "GET", params: { hook_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks/:hook_id" }, getLatestPagesBuild: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pages/builds/latest" }, getLatestRelease: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/latest" }, getPages: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pages" }, getPagesBuild: { method: "GET", params: { build_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pages/builds/:build_id" }, getParticipationStats: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/stats/participation" }, getProtectedBranchAdminEnforcement: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" }, getProtectedBranchPullRequestReviewEnforcement: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" }, getProtectedBranchRequiredSignatures: { headers: { accept: "application/vnd.github.zzzax-preview+json" }, method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" }, getProtectedBranchRequiredStatusChecks: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" }, getProtectedBranchRestrictions: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" }, getPunchCardStats: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/stats/punch_card" }, getReadme: { method: "GET", params: { owner: { required: true, type: "string" }, ref: { type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/readme" }, getRelease: { method: "GET", params: { owner: { required: true, type: "string" }, release_id: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/:release_id" }, getReleaseAsset: { method: "GET", params: { asset_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/assets/:asset_id" }, getReleaseByTag: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, tag: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/tags/:tag" }, getTeamsWithAccessToProtectedBranch: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" }, getTopPaths: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/traffic/popular/paths" }, getTopReferrers: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/traffic/popular/referrers" }, getUsersWithAccessToProtectedBranch: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" }, getViews: { method: "GET", params: { owner: { required: true, type: "string" }, per: { enum: ["day", "week"], type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/traffic/views" }, list: { method: "GET", params: { affiliation: { type: "string" }, direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, sort: { enum: ["created", "updated", "pushed", "full_name"], type: "string" }, type: { enum: ["all", "owner", "public", "private", "member"], type: "string" }, visibility: { enum: ["all", "public", "private"], type: "string" } }, url: "/user/repos" }, listAppsWithAccessToProtectedBranch: { deprecated: "octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)", method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" }, listAssetsForRelease: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, release_id: { required: true, type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/:release_id/assets" }, listBranches: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, protected: { type: "boolean" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches" }, listBranchesForHeadCommit: { headers: { accept: "application/vnd.github.groot-preview+json" }, method: "GET", params: { commit_sha: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head" }, listCollaborators: { method: "GET", params: { affiliation: { enum: ["outside", "direct", "all"], type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/collaborators" }, listCommentsForCommit: { method: "GET", params: { commit_sha: { required: true, type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, ref: { alias: "commit_sha", deprecated: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:commit_sha/comments" }, listCommitComments: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/comments" }, listCommits: { method: "GET", params: { author: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, path: { type: "string" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, sha: { type: "string" }, since: { type: "string" }, until: { type: "string" } }, url: "/repos/:owner/:repo/commits" }, listContributors: { method: "GET", params: { anon: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/contributors" }, listDeployKeys: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/keys" }, listDeploymentStatuses: { method: "GET", params: { deployment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" }, listDeployments: { method: "GET", params: { environment: { type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, ref: { type: "string" }, repo: { required: true, type: "string" }, sha: { type: "string" }, task: { type: "string" } }, url: "/repos/:owner/:repo/deployments" }, listDownloads: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/downloads" }, listForOrg: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, sort: { enum: ["created", "updated", "pushed", "full_name"], type: "string" }, type: { enum: ["all", "public", "private", "forks", "sources", "member", "internal"], type: "string" } }, url: "/orgs/:org/repos" }, listForUser: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, sort: { enum: ["created", "updated", "pushed", "full_name"], type: "string" }, type: { enum: ["all", "owner", "member"], type: "string" }, username: { required: true, type: "string" } }, url: "/users/:username/repos" }, listForks: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" }, sort: { enum: ["newest", "oldest", "stargazers"], type: "string" } }, url: "/repos/:owner/:repo/forks" }, listHooks: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks" }, listInvitations: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/invitations" }, listInvitationsForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/repository_invitations" }, listLanguages: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/languages" }, listPagesBuilds: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pages/builds" }, listProtectedBranchRequiredStatusChecksContexts: { method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" }, listProtectedBranchTeamRestrictions: { deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)", method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" }, listProtectedBranchUserRestrictions: { deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)", method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" }, listPublic: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "integer" } }, url: "/repositories" }, listPullRequestsAssociatedWithCommit: { headers: { accept: "application/vnd.github.groot-preview+json" }, method: "GET", params: { commit_sha: { required: true, type: "string" }, owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:commit_sha/pulls" }, listReleases: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases" }, listStatusesForRef: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, ref: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/commits/:ref/statuses" }, listTags: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/tags" }, listTeams: { method: "GET", params: { owner: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/teams" }, listTeamsWithAccessToProtectedBranch: { deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)", method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" }, listTopics: { headers: { accept: "application/vnd.github.mercy-preview+json" }, method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/topics" }, listUsersWithAccessToProtectedBranch: { deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)", method: "GET", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" }, merge: { method: "POST", params: { base: { required: true, type: "string" }, commit_message: { type: "string" }, head: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/merges" }, pingHook: { method: "POST", params: { hook_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks/:hook_id/pings" }, removeBranchProtection: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection" }, removeCollaborator: { method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/repos/:owner/:repo/collaborators/:username" }, removeDeployKey: { method: "DELETE", params: { key_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/keys/:key_id" }, removeProtectedBranchAdminEnforcement: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" }, removeProtectedBranchAppRestrictions: { method: "DELETE", params: { apps: { mapTo: "data", required: true, type: "string[]" }, branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" }, removeProtectedBranchPullRequestReviewEnforcement: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" }, removeProtectedBranchRequiredSignatures: { headers: { accept: "application/vnd.github.zzzax-preview+json" }, method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" }, removeProtectedBranchRequiredStatusChecks: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" }, removeProtectedBranchRequiredStatusChecksContexts: { method: "DELETE", params: { branch: { required: true, type: "string" }, contexts: { mapTo: "data", required: true, type: "string[]" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" }, removeProtectedBranchRestrictions: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" }, removeProtectedBranchTeamRestrictions: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, teams: { mapTo: "data", required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" }, removeProtectedBranchUserRestrictions: { method: "DELETE", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, users: { mapTo: "data", required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" }, replaceProtectedBranchAppRestrictions: { method: "PUT", params: { apps: { mapTo: "data", required: true, type: "string[]" }, branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" }, replaceProtectedBranchRequiredStatusChecksContexts: { method: "PUT", params: { branch: { required: true, type: "string" }, contexts: { mapTo: "data", required: true, type: "string[]" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" }, replaceProtectedBranchTeamRestrictions: { method: "PUT", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, teams: { mapTo: "data", required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" }, replaceProtectedBranchUserRestrictions: { method: "PUT", params: { branch: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, users: { mapTo: "data", required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" }, replaceTopics: { headers: { accept: "application/vnd.github.mercy-preview+json" }, method: "PUT", params: { names: { required: true, type: "string[]" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/topics" }, requestPageBuild: { method: "POST", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/pages/builds" }, retrieveCommunityProfileMetrics: { method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/community/profile" }, testPushHook: { method: "POST", params: { hook_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks/:hook_id/tests" }, transfer: { method: "POST", params: { new_owner: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_ids: { type: "integer[]" } }, url: "/repos/:owner/:repo/transfer" }, update: { method: "PATCH", params: { allow_merge_commit: { type: "boolean" }, allow_rebase_merge: { type: "boolean" }, allow_squash_merge: { type: "boolean" }, archived: { type: "boolean" }, default_branch: { type: "string" }, delete_branch_on_merge: { type: "boolean" }, description: { type: "string" }, has_issues: { type: "boolean" }, has_projects: { type: "boolean" }, has_wiki: { type: "boolean" }, homepage: { type: "string" }, is_template: { type: "boolean" }, name: { type: "string" }, owner: { required: true, type: "string" }, private: { type: "boolean" }, repo: { required: true, type: "string" }, visibility: { enum: ["public", "private", "visibility", "internal"], type: "string" } }, url: "/repos/:owner/:repo" }, updateBranchProtection: { method: "PUT", params: { allow_deletions: { type: "boolean" }, allow_force_pushes: { allowNull: true, type: "boolean" }, branch: { required: true, type: "string" }, enforce_admins: { allowNull: true, required: true, type: "boolean" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, required_linear_history: { type: "boolean" }, required_pull_request_reviews: { allowNull: true, required: true, type: "object" }, "required_pull_request_reviews.dismiss_stale_reviews": { type: "boolean" }, "required_pull_request_reviews.dismissal_restrictions": { type: "object" }, "required_pull_request_reviews.dismissal_restrictions.teams": { type: "string[]" }, "required_pull_request_reviews.dismissal_restrictions.users": { type: "string[]" }, "required_pull_request_reviews.require_code_owner_reviews": { type: "boolean" }, "required_pull_request_reviews.required_approving_review_count": { type: "integer" }, required_status_checks: { allowNull: true, required: true, type: "object" }, "required_status_checks.contexts": { required: true, type: "string[]" }, "required_status_checks.strict": { required: true, type: "boolean" }, restrictions: { allowNull: true, required: true, type: "object" }, "restrictions.apps": { type: "string[]" }, "restrictions.teams": { required: true, type: "string[]" }, "restrictions.users": { required: true, type: "string[]" } }, url: "/repos/:owner/:repo/branches/:branch/protection" }, updateCommitComment: { method: "PATCH", params: { body: { required: true, type: "string" }, comment_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/comments/:comment_id" }, updateFile: { deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", method: "PUT", params: { author: { type: "object" }, "author.email": { required: true, type: "string" }, "author.name": { required: true, type: "string" }, branch: { type: "string" }, committer: { type: "object" }, "committer.email": { required: true, type: "string" }, "committer.name": { required: true, type: "string" }, content: { required: true, type: "string" }, message: { required: true, type: "string" }, owner: { required: true, type: "string" }, path: { required: true, type: "string" }, repo: { required: true, type: "string" }, sha: { type: "string" } }, url: "/repos/:owner/:repo/contents/:path" }, updateHook: { method: "PATCH", params: { active: { type: "boolean" }, add_events: { type: "string[]" }, config: { type: "object" }, "config.content_type": { type: "string" }, "config.insecure_ssl": { type: "string" }, "config.secret": { type: "string" }, "config.url": { required: true, type: "string" }, events: { type: "string[]" }, hook_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, remove_events: { type: "string[]" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/hooks/:hook_id" }, updateInformationAboutPagesSite: { method: "PUT", params: { cname: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, source: { enum: ['"gh-pages"', '"master"', '"master /docs"'], type: "string" } }, url: "/repos/:owner/:repo/pages" }, updateInvitation: { method: "PATCH", params: { invitation_id: { required: true, type: "integer" }, owner: { required: true, type: "string" }, permissions: { enum: ["read", "write", "admin"], type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/invitations/:invitation_id" }, updateProtectedBranchPullRequestReviewEnforcement: { method: "PATCH", params: { branch: { required: true, type: "string" }, dismiss_stale_reviews: { type: "boolean" }, dismissal_restrictions: { type: "object" }, "dismissal_restrictions.teams": { type: "string[]" }, "dismissal_restrictions.users": { type: "string[]" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, require_code_owner_reviews: { type: "boolean" }, required_approving_review_count: { type: "integer" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" }, updateProtectedBranchRequiredStatusChecks: { method: "PATCH", params: { branch: { required: true, type: "string" }, contexts: { type: "string[]" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, strict: { type: "boolean" } }, url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" }, updateRelease: { method: "PATCH", params: { body: { type: "string" }, draft: { type: "boolean" }, name: { type: "string" }, owner: { required: true, type: "string" }, prerelease: { type: "boolean" }, release_id: { required: true, type: "integer" }, repo: { required: true, type: "string" }, tag_name: { type: "string" }, target_commitish: { type: "string" } }, url: "/repos/:owner/:repo/releases/:release_id" }, updateReleaseAsset: { method: "PATCH", params: { asset_id: { required: true, type: "integer" }, label: { type: "string" }, name: { type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" } }, url: "/repos/:owner/:repo/releases/assets/:asset_id" }, uploadReleaseAsset: { method: "POST", params: { data: { mapTo: "data", required: true, type: "string | object" }, file: { alias: "data", deprecated: true, type: "string | object" }, headers: { required: true, type: "object" }, "headers.content-length": { required: true, type: "integer" }, "headers.content-type": { required: true, type: "string" }, label: { type: "string" }, name: { required: true, type: "string" }, url: { required: true, type: "string" } }, url: ":url" } }, search: { code: { method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, q: { required: true, type: "string" }, sort: { enum: ["indexed"], type: "string" } }, url: "/search/code" }, commits: { headers: { accept: "application/vnd.github.cloak-preview+json" }, method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, q: { required: true, type: "string" }, sort: { enum: ["author-date", "committer-date"], type: "string" } }, url: "/search/commits" }, issues: { deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)", method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, q: { required: true, type: "string" }, sort: { enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], type: "string" } }, url: "/search/issues" }, issuesAndPullRequests: { method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, q: { required: true, type: "string" }, sort: { enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], type: "string" } }, url: "/search/issues" }, labels: { method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, q: { required: true, type: "string" }, repository_id: { required: true, type: "integer" }, sort: { enum: ["created", "updated"], type: "string" } }, url: "/search/labels" }, repos: { method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, q: { required: true, type: "string" }, sort: { enum: ["stars", "forks", "help-wanted-issues", "updated"], type: "string" } }, url: "/search/repositories" }, topics: { method: "GET", params: { q: { required: true, type: "string" } }, url: "/search/topics" }, users: { method: "GET", params: { order: { enum: ["desc", "asc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, q: { required: true, type: "string" }, sort: { enum: ["followers", "repositories", "joined"], type: "string" } }, url: "/search/users" } }, teams: { addMember: { deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)", method: "PUT", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/members/:username" }, addMemberLegacy: { deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy", method: "PUT", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/members/:username" }, addOrUpdateMembership: { deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)", method: "PUT", params: { role: { enum: ["member", "maintainer"], type: "string" }, team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/memberships/:username" }, addOrUpdateMembershipInOrg: { method: "PUT", params: { org: { required: true, type: "string" }, role: { enum: ["member", "maintainer"], type: "string" }, team_slug: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/memberships/:username" }, addOrUpdateMembershipLegacy: { deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy", method: "PUT", params: { role: { enum: ["member", "maintainer"], type: "string" }, team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/memberships/:username" }, addOrUpdateProject: { deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PUT", params: { permission: { enum: ["read", "write", "admin"], type: "string" }, project_id: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects/:project_id" }, addOrUpdateProjectInOrg: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PUT", params: { org: { required: true, type: "string" }, permission: { enum: ["read", "write", "admin"], type: "string" }, project_id: { required: true, type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/projects/:project_id" }, addOrUpdateProjectLegacy: { deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy", headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "PUT", params: { permission: { enum: ["read", "write", "admin"], type: "string" }, project_id: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects/:project_id" }, addOrUpdateRepo: { deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)", method: "PUT", params: { owner: { required: true, type: "string" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, repo: { required: true, type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos/:owner/:repo" }, addOrUpdateRepoInOrg: { method: "PUT", params: { org: { required: true, type: "string" }, owner: { required: true, type: "string" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, repo: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" }, addOrUpdateRepoLegacy: { deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy", method: "PUT", params: { owner: { required: true, type: "string" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, repo: { required: true, type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos/:owner/:repo" }, checkManagesRepo: { deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)", method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos/:owner/:repo" }, checkManagesRepoInOrg: { method: "GET", params: { org: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" }, checkManagesRepoLegacy: { deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy", method: "GET", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos/:owner/:repo" }, create: { method: "POST", params: { description: { type: "string" }, maintainers: { type: "string[]" }, name: { required: true, type: "string" }, org: { required: true, type: "string" }, parent_team_id: { type: "integer" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, privacy: { enum: ["secret", "closed"], type: "string" }, repo_names: { type: "string[]" } }, url: "/orgs/:org/teams" }, createDiscussion: { deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)", method: "POST", params: { body: { required: true, type: "string" }, private: { type: "boolean" }, team_id: { required: true, type: "integer" }, title: { required: true, type: "string" } }, url: "/teams/:team_id/discussions" }, createDiscussionComment: { deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)", method: "POST", params: { body: { required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments" }, createDiscussionCommentInOrg: { method: "POST", params: { body: { required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" }, createDiscussionCommentLegacy: { deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy", method: "POST", params: { body: { required: true, type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments" }, createDiscussionInOrg: { method: "POST", params: { body: { required: true, type: "string" }, org: { required: true, type: "string" }, private: { type: "boolean" }, team_slug: { required: true, type: "string" }, title: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions" }, createDiscussionLegacy: { deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy", method: "POST", params: { body: { required: true, type: "string" }, private: { type: "boolean" }, team_id: { required: true, type: "integer" }, title: { required: true, type: "string" } }, url: "/teams/:team_id/discussions" }, delete: { deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)", method: "DELETE", params: { team_id: { required: true, type: "integer" } }, url: "/teams/:team_id" }, deleteDiscussion: { deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)", method: "DELETE", params: { discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number" }, deleteDiscussionComment: { deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)", method: "DELETE", params: { comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" }, deleteDiscussionCommentInOrg: { method: "DELETE", params: { comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" }, deleteDiscussionCommentLegacy: { deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy", method: "DELETE", params: { comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" }, deleteDiscussionInOrg: { method: "DELETE", params: { discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" }, deleteDiscussionLegacy: { deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy", method: "DELETE", params: { discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number" }, deleteInOrg: { method: "DELETE", params: { org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug" }, deleteLegacy: { deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy", method: "DELETE", params: { team_id: { required: true, type: "integer" } }, url: "/teams/:team_id" }, get: { deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)", method: "GET", params: { team_id: { required: true, type: "integer" } }, url: "/teams/:team_id" }, getByName: { method: "GET", params: { org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug" }, getDiscussion: { deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)", method: "GET", params: { discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number" }, getDiscussionComment: { deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)", method: "GET", params: { comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" }, getDiscussionCommentInOrg: { method: "GET", params: { comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" }, getDiscussionCommentLegacy: { deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy", method: "GET", params: { comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" }, getDiscussionInOrg: { method: "GET", params: { discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" }, getDiscussionLegacy: { deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy", method: "GET", params: { discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number" }, getLegacy: { deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy", method: "GET", params: { team_id: { required: true, type: "integer" } }, url: "/teams/:team_id" }, getMember: { deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)", method: "GET", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/members/:username" }, getMemberLegacy: { deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy", method: "GET", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/members/:username" }, getMembership: { deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)", method: "GET", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/memberships/:username" }, getMembershipInOrg: { method: "GET", params: { org: { required: true, type: "string" }, team_slug: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/memberships/:username" }, getMembershipLegacy: { deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy", method: "GET", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/memberships/:username" }, list: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" } }, url: "/orgs/:org/teams" }, listChild: { deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/teams" }, listChildInOrg: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/teams" }, listChildLegacy: { deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/teams" }, listDiscussionComments: { deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)", method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, discussion_number: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments" }, listDiscussionCommentsInOrg: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" }, listDiscussionCommentsLegacy: { deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy", method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, discussion_number: { required: true, type: "integer" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments" }, listDiscussions: { deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)", method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions" }, listDiscussionsInOrg: { method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions" }, listDiscussionsLegacy: { deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy", method: "GET", params: { direction: { enum: ["asc", "desc"], type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions" }, listForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/teams" }, listMembers: { deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, role: { enum: ["member", "maintainer", "all"], type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/members" }, listMembersInOrg: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, role: { enum: ["member", "maintainer", "all"], type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/members" }, listMembersLegacy: { deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, role: { enum: ["member", "maintainer", "all"], type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/members" }, listPendingInvitations: { deprecated: "octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/invitations" }, listPendingInvitationsInOrg: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/invitations" }, listPendingInvitationsLegacy: { deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/invitations" }, listProjects: { deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects" }, listProjectsInOrg: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/projects" }, listProjectsLegacy: { deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy", headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects" }, listRepos: { deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos" }, listReposInOrg: { method: "GET", params: { org: { required: true, type: "string" }, page: { type: "integer" }, per_page: { type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/repos" }, listReposLegacy: { deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy", method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos" }, removeMember: { deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)", method: "DELETE", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/members/:username" }, removeMemberLegacy: { deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-member-legacy", method: "DELETE", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/members/:username" }, removeMembership: { deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)", method: "DELETE", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/memberships/:username" }, removeMembershipInOrg: { method: "DELETE", params: { org: { required: true, type: "string" }, team_slug: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/memberships/:username" }, removeMembershipLegacy: { deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy", method: "DELETE", params: { team_id: { required: true, type: "integer" }, username: { required: true, type: "string" } }, url: "/teams/:team_id/memberships/:username" }, removeProject: { deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)", method: "DELETE", params: { project_id: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects/:project_id" }, removeProjectInOrg: { method: "DELETE", params: { org: { required: true, type: "string" }, project_id: { required: true, type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/projects/:project_id" }, removeProjectLegacy: { deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy", method: "DELETE", params: { project_id: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects/:project_id" }, removeRepo: { deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)", method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos/:owner/:repo" }, removeRepoInOrg: { method: "DELETE", params: { org: { required: true, type: "string" }, owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" }, removeRepoLegacy: { deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy", method: "DELETE", params: { owner: { required: true, type: "string" }, repo: { required: true, type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/repos/:owner/:repo" }, reviewProject: { deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)", headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { project_id: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects/:project_id" }, reviewProjectInOrg: { headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { org: { required: true, type: "string" }, project_id: { required: true, type: "integer" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/projects/:project_id" }, reviewProjectLegacy: { deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy", headers: { accept: "application/vnd.github.inertia-preview+json" }, method: "GET", params: { project_id: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/projects/:project_id" }, update: { deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)", method: "PATCH", params: { description: { type: "string" }, name: { required: true, type: "string" }, parent_team_id: { type: "integer" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, privacy: { enum: ["secret", "closed"], type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id" }, updateDiscussion: { deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)", method: "PATCH", params: { body: { type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" }, title: { type: "string" } }, url: "/teams/:team_id/discussions/:discussion_number" }, updateDiscussionComment: { deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)", method: "PATCH", params: { body: { required: true, type: "string" }, comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" }, updateDiscussionCommentInOrg: { method: "PATCH", params: { body: { required: true, type: "string" }, comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" }, updateDiscussionCommentLegacy: { deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy", method: "PATCH", params: { body: { required: true, type: "string" }, comment_number: { required: true, type: "integer" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" }, updateDiscussionInOrg: { method: "PATCH", params: { body: { type: "string" }, discussion_number: { required: true, type: "integer" }, org: { required: true, type: "string" }, team_slug: { required: true, type: "string" }, title: { type: "string" } }, url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" }, updateDiscussionLegacy: { deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy", method: "PATCH", params: { body: { type: "string" }, discussion_number: { required: true, type: "integer" }, team_id: { required: true, type: "integer" }, title: { type: "string" } }, url: "/teams/:team_id/discussions/:discussion_number" }, updateInOrg: { method: "PATCH", params: { description: { type: "string" }, name: { required: true, type: "string" }, org: { required: true, type: "string" }, parent_team_id: { type: "integer" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, privacy: { enum: ["secret", "closed"], type: "string" }, team_slug: { required: true, type: "string" } }, url: "/orgs/:org/teams/:team_slug" }, updateLegacy: { deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy", method: "PATCH", params: { description: { type: "string" }, name: { required: true, type: "string" }, parent_team_id: { type: "integer" }, permission: { enum: ["pull", "push", "admin"], type: "string" }, privacy: { enum: ["secret", "closed"], type: "string" }, team_id: { required: true, type: "integer" } }, url: "/teams/:team_id" } }, users: { addEmails: { method: "POST", params: { emails: { required: true, type: "string[]" } }, url: "/user/emails" }, block: { method: "PUT", params: { username: { required: true, type: "string" } }, url: "/user/blocks/:username" }, checkBlocked: { method: "GET", params: { username: { required: true, type: "string" } }, url: "/user/blocks/:username" }, checkFollowing: { method: "GET", params: { username: { required: true, type: "string" } }, url: "/user/following/:username" }, checkFollowingForUser: { method: "GET", params: { target_user: { required: true, type: "string" }, username: { required: true, type: "string" } }, url: "/users/:username/following/:target_user" }, createGpgKey: { method: "POST", params: { armored_public_key: { type: "string" } }, url: "/user/gpg_keys" }, createPublicKey: { method: "POST", params: { key: { type: "string" }, title: { type: "string" } }, url: "/user/keys" }, deleteEmails: { method: "DELETE", params: { emails: { required: true, type: "string[]" } }, url: "/user/emails" }, deleteGpgKey: { method: "DELETE", params: { gpg_key_id: { required: true, type: "integer" } }, url: "/user/gpg_keys/:gpg_key_id" }, deletePublicKey: { method: "DELETE", params: { key_id: { required: true, type: "integer" } }, url: "/user/keys/:key_id" }, follow: { method: "PUT", params: { username: { required: true, type: "string" } }, url: "/user/following/:username" }, getAuthenticated: { method: "GET", params: {}, url: "/user" }, getByUsername: { method: "GET", params: { username: { required: true, type: "string" } }, url: "/users/:username" }, getContextForUser: { method: "GET", params: { subject_id: { type: "string" }, subject_type: { enum: ["organization", "repository", "issue", "pull_request"], type: "string" }, username: { required: true, type: "string" } }, url: "/users/:username/hovercard" }, getGpgKey: { method: "GET", params: { gpg_key_id: { required: true, type: "integer" } }, url: "/user/gpg_keys/:gpg_key_id" }, getPublicKey: { method: "GET", params: { key_id: { required: true, type: "integer" } }, url: "/user/keys/:key_id" }, list: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, since: { type: "string" } }, url: "/users" }, listBlocked: { method: "GET", params: {}, url: "/user/blocks" }, listEmails: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/emails" }, listFollowersForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/followers" }, listFollowersForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/followers" }, listFollowingForAuthenticatedUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/following" }, listFollowingForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/following" }, listGpgKeys: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/gpg_keys" }, listGpgKeysForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/gpg_keys" }, listPublicEmails: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/public_emails" }, listPublicKeys: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" } }, url: "/user/keys" }, listPublicKeysForUser: { method: "GET", params: { page: { type: "integer" }, per_page: { type: "integer" }, username: { required: true, type: "string" } }, url: "/users/:username/keys" }, togglePrimaryEmailVisibility: { method: "PATCH", params: { email: { required: true, type: "string" }, visibility: { required: true, type: "string" } }, url: "/user/email/visibility" }, unblock: { method: "DELETE", params: { username: { required: true, type: "string" } }, url: "/user/blocks/:username" }, unfollow: { method: "DELETE", params: { username: { required: true, type: "string" } }, url: "/user/following/:username" }, updateAuthenticated: { method: "PATCH", params: { bio: { type: "string" }, blog: { type: "string" }, company: { type: "string" }, email: { type: "string" }, hireable: { type: "boolean" }, location: { type: "string" }, name: { type: "string" } }, url: "/user" } } }; const VERSION = "2.4.0"; function registerEndpoints(octokit, routes) { Object.keys(routes).forEach(namespaceName => { if (!octokit[namespaceName]) { octokit[namespaceName] = {}; } Object.keys(routes[namespaceName]).forEach(apiName => { const apiOptions = routes[namespaceName][apiName]; const endpointDefaults = ["method", "url", "headers"].reduce((map, key) => { if (typeof apiOptions[key] !== "undefined") { map[key] = apiOptions[key]; } return map; }, {}); endpointDefaults.request = { validate: apiOptions.params }; let request = octokit.request.defaults(endpointDefaults); // patch request & endpoint methods to support deprecated parameters. // Not the most elegant solution, but we don’t want to move deprecation // logic into octokit/endpoint.js as it’s out of scope const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(key => apiOptions.params[key].deprecated); if (hasDeprecatedParam) { const patch = patchForDeprecation.bind(null, octokit, apiOptions); request = patch(octokit.request.defaults(endpointDefaults), `.${namespaceName}.${apiName}()`); request.endpoint = patch(request.endpoint, `.${namespaceName}.${apiName}.endpoint()`); request.endpoint.merge = patch(request.endpoint.merge, `.${namespaceName}.${apiName}.endpoint.merge()`); } if (apiOptions.deprecated) { octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() { octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`)); octokit[namespaceName][apiName] = request; return request.apply(null, arguments); }, request); return; } octokit[namespaceName][apiName] = request; }); }); } function patchForDeprecation(octokit, apiOptions, method, methodName) { const patchedMethod = options => { options = Object.assign({}, options); Object.keys(options).forEach(key => { if (apiOptions.params[key] && apiOptions.params[key].deprecated) { const aliasKey = apiOptions.params[key].alias; octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`)); if (!(aliasKey in options)) { options[aliasKey] = options[key]; } delete options[key]; } }); return method(options); }; Object.keys(method).forEach(key => { patchedMethod[key] = method[key]; }); return patchedMethod; } /** * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is * done, we will remove the registerEndpoints methods and return the methods * directly as with the other plugins. At that point we will also remove the * legacy workarounds and deprecations. * * See the plan at * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 */ function restEndpointMethods(octokit) { // @ts-ignore octokit.registerEndpoints = registerEndpoints.bind(null, octokit); registerEndpoints(octokit, endpointsByScope); // Aliasing scopes for backward compatibility // See https://github.com/octokit/rest.js/pull/1134 [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]].forEach(([deprecatedScope, scope]) => { Object.defineProperty(octokit, deprecatedScope, { get() { octokit.log.warn( // @ts-ignore new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`)); // @ts-ignore return octokit[scope]; } }); }); return {}; } restEndpointMethods.VERSION = VERSION; exports.restEndpointMethods = restEndpointMethods; //# sourceMappingURL=index.js.map /***/ }), /***/ 537: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var deprecation = __nccwpck_require__(8932); var once = _interopDefault(__nccwpck_require__(1223)); const logOnce = once(deprecation => console.warn(deprecation)); /** * Error with extra properties to help with debugging */ class RequestError extends Error { constructor(message, statusCode, options) { super(message); // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = "HttpError"; this.status = statusCode; Object.defineProperty(this, "code", { get() { logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); return statusCode; } }); this.headers = options.headers || {}; // redact request credentials without mutating original request options const requestCopy = Object.assign({}, options.request); if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") }); } requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); this.request = requestCopy; } } exports.RequestError = RequestError; //# sourceMappingURL=index.js.map /***/ }), /***/ 6234: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var endpoint = __nccwpck_require__(9440); var universalUserAgent = __nccwpck_require__(5030); var isPlainObject = __nccwpck_require__(3287); var nodeFetch = _interopDefault(__nccwpck_require__(467)); var requestError = __nccwpck_require__(537); const VERSION = "5.4.12"; function getBufferResponse(response) { return response.arrayBuffer(); } function fetchWrapper(requestOptions) { if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } let headers = {}; let status; let url; const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; return fetch(requestOptions.url, Object.assign({ method: requestOptions.method, body: requestOptions.body, headers: requestOptions.headers, redirect: requestOptions.redirect }, requestOptions.request)).then(response => { url = response.url; status = response.status; for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } if (status === 204 || status === 205) { return; } // GitHub API returns 200 for HEAD requests if (requestOptions.method === "HEAD") { if (status < 400) { return; } throw new requestError.RequestError(response.statusText, status, { headers, request: requestOptions }); } if (status === 304) { throw new requestError.RequestError("Not modified", status, { headers, request: requestOptions }); } if (status >= 400) { return response.text().then(message => { const error = new requestError.RequestError(message, status, { headers, request: requestOptions }); try { let responseBody = JSON.parse(error.message); Object.assign(error, responseBody); let errors = responseBody.errors; // Assumption `errors` would always be in Array format error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); } catch (e) {// ignore, see octokit/rest.js#684 } throw error; }); } const contentType = response.headers.get("content-type"); if (/application\/json/.test(contentType)) { return response.json(); } if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { return response.text(); } return getBufferResponse(response); }).then(data => { return { status, url, headers, data }; }).catch(error => { if (error instanceof requestError.RequestError) { throw error; } throw new requestError.RequestError(error.message, 500, { headers, request: requestOptions }); }); } function withDefaults(oldEndpoint, newDefaults) { const endpoint = oldEndpoint.defaults(newDefaults); const newApi = function (route, parameters) { const endpointOptions = endpoint.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { return fetchWrapper(endpoint.parse(endpointOptions)); } const request = (route, parameters) => { return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); }; Object.assign(request, { endpoint, defaults: withDefaults.bind(null, endpoint) }); return endpointOptions.request.hook(request, endpointOptions); }; return Object.assign(newApi, { endpoint, defaults: withDefaults.bind(null, endpoint) }); } const request = withDefaults(endpoint.endpoint, { headers: { "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` } }); exports.request = request; //# sourceMappingURL=index.js.map /***/ }), /***/ 1150: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {spawnSync} = __nccwpck_require__(3129); const isString = (a) => typeof a === 'string'; module.exports = (str, filter = {}) => { if (!isString(str)) { filter = str || {}; str = run(); } const { added, modified, untracked, deleted, renamed, } = filter; const files = parse(str); const picked = pick(files, { added, modified, untracked, deleted, renamed, }); const names = getNames(picked); return names; }; const getName = ({name}) => name; module.exports.getNames = getNames; function getNames(files) { return files.map(getName); } module.exports.run = run; function run() { const result = spawnSync('git', ['status', '--porcelain']); return result.stdout.toString(); } module.exports.parse = parse; function parse(str) { const result = []; const lines = str .split('\n') .filter(Boolean); for (const line of lines) { const {name, mode} = parseLine(line); result.push({ name, mode, }); } return result; } const UNTRACKED = '?'; const RENAMED = 'R'; const ARROW = '-> '; // "R a -> b" -> "b" const cutRenameTo = (line) => { const i = line.indexOf(ARROW); const count = i + ARROW.length; return line.slice(count); }; function parseLine(line) { const [first] = line; if (first === UNTRACKED) return { name: line.replace('?? ', ''), mode: UNTRACKED, }; if (first === RENAMED) return { name: cutRenameTo(line), mode: RENAMED, }; const [mode] = line.match(/^[\sA-Z]{1,}\s/, ''); const name = line.replace(mode, ''); return { name, mode, }; } const isModified = ({mode}) => /M/.test(mode); const isAdded = ({mode}) => /A/.test(mode); const isRenamed = ({mode}) => /R/.test(mode); const isDeleted = ({mode}) => /D/.test(mode); const isUntracked = ({mode}) => /\?/.test(mode); const check = ({added, modified, untracked, deleted, renamed}) => (file) => { let is = false; if (added) is = is || isAdded(file); if (modified) is = is || isModified(file); if (untracked) is = is || isUntracked(file); if (deleted) is = is || isDeleted(file); if (renamed) is = is || isRenamed(file); return is; }; module.exports.pick = pick; function pick(files, {added, modified, deleted, untracked, renamed}) { return files.filter(check({ added, modified, untracked, deleted, renamed, })); } /***/ }), /***/ 5995: /***/ ((module) => { module.exports = r => { const n = process.versions.node.split('.').map(x => parseInt(x, 10)) r = r.split('.').map(x => parseInt(x, 10)) return n[0] > r[0] || (n[0] === r[0] && (n[1] > r[1] || (n[1] === r[1] && n[2] >= r[2]))) } /***/ }), /***/ 5224: /***/ ((module) => { module.exports = function atob(str) { return Buffer.from(str, 'base64').toString('binary') } /***/ }), /***/ 3682: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var register = __nccwpck_require__(4670) var addHook = __nccwpck_require__(5549) var removeHook = __nccwpck_require__(6819) // bind with array of arguments: https://stackoverflow.com/a/21792913 var bind = Function.bind var bindable = bind.bind(bind) function bindApi (hook, state, name) { var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) hook.api = { remove: removeHookRef } hook.remove = removeHookRef ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { var args = name ? [state, kind, name] : [state, kind] hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) }) } function HookSingular () { var singularHookName = 'h' var singularHookState = { registry: {} } var singularHook = register.bind(null, singularHookState, singularHookName) bindApi(singularHook, singularHookState, singularHookName) return singularHook } function HookCollection () { var state = { registry: {} } var hook = register.bind(null, state) bindApi(hook, state) return hook } var collectionHookDeprecationMessageDisplayed = false function Hook () { if (!collectionHookDeprecationMessageDisplayed) { console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') collectionHookDeprecationMessageDisplayed = true } return HookCollection() } Hook.Singular = HookSingular.bind() Hook.Collection = HookCollection.bind() module.exports = Hook // expose constructors as a named property for TypeScript module.exports.Hook = Hook module.exports.Singular = Hook.Singular module.exports.Collection = Hook.Collection /***/ }), /***/ 5549: /***/ ((module) => { module.exports = addHook function addHook (state, kind, name, hook) { var orig = hook if (!state.registry[name]) { state.registry[name] = [] } if (kind === 'before') { hook = function (method, options) { return Promise.resolve() .then(orig.bind(null, options)) .then(method.bind(null, options)) } } if (kind === 'after') { hook = function (method, options) { var result return Promise.resolve() .then(method.bind(null, options)) .then(function (result_) { result = result_ return orig(result, options) }) .then(function () { return result }) } } if (kind === 'error') { hook = function (method, options) { return Promise.resolve() .then(method.bind(null, options)) .catch(function (error) { return orig(error, options) }) } } state.registry[name].push({ hook: hook, orig: orig }) } /***/ }), /***/ 4670: /***/ ((module) => { module.exports = register function register (state, name, method, options) { if (typeof method !== 'function') { throw new Error('method for before hook must be a function') } if (!options) { options = {} } if (Array.isArray(name)) { return name.reverse().reduce(function (callback, name) { return register.bind(null, state, name, callback, options) }, method)() } return Promise.resolve() .then(function () { if (!state.registry[name]) { return method(options) } return (state.registry[name]).reduce(function (method, registered) { return registered.hook.bind(null, method, options) }, method)() }) } /***/ }), /***/ 6819: /***/ ((module) => { module.exports = removeHook function removeHook (state, name, method) { if (!state.registry[name]) { return } var index = state.registry[name] .map(function (registered) { return registered.orig }) .indexOf(method) if (index === -1) { return } state.registry[name].splice(index, 1) } /***/ }), /***/ 2358: /***/ ((module) => { module.exports = function btoa(str) { return new Buffer(str).toString('base64') } /***/ }), /***/ 8932: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); class Deprecation extends Error { constructor(message) { super(message); // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = 'Deprecation'; } } exports.Deprecation = Deprecation; /***/ }), /***/ 2437: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /* @flow */ /*:: type DotenvParseOptions = { debug?: boolean } // keys and values from src type DotenvParseOutput = { [string]: string } type DotenvConfigOptions = { path?: string, // path to .env file encoding?: string, // encoding of .env file debug?: string // turn on logging for debugging purposes } type DotenvConfigOutput = { parsed?: DotenvParseOutput, error?: Error } */ const fs = __nccwpck_require__(5747) const path = __nccwpck_require__(5622) function log (message /*: string */) { console.log(`[dotenv][DEBUG] ${message}`) } const NEWLINE = '\n' const RE_INI_KEY_VAL = /^\s*([\w.-]+)\s*=\s*(.*)?\s*$/ const RE_NEWLINES = /\\n/g const NEWLINES_MATCH = /\n|\r|\r\n/ // Parses src into an Object function parse (src /*: string | Buffer */, options /*: ?DotenvParseOptions */) /*: DotenvParseOutput */ { const debug = Boolean(options && options.debug) const obj = {} // convert Buffers before splitting into lines and processing src.toString().split(NEWLINES_MATCH).forEach(function (line, idx) { // matching "KEY' and 'VAL' in 'KEY=VAL' const keyValueArr = line.match(RE_INI_KEY_VAL) // matched? if (keyValueArr != null) { const key = keyValueArr[1] // default undefined or missing values to empty string let val = (keyValueArr[2] || '') const end = val.length - 1 const isDoubleQuoted = val[0] === '"' && val[end] === '"' const isSingleQuoted = val[0] === "'" && val[end] === "'" // if single or double quoted, remove quotes if (isSingleQuoted || isDoubleQuoted) { val = val.substring(1, end) // if double quoted, expand newlines if (isDoubleQuoted) { val = val.replace(RE_NEWLINES, NEWLINE) } } else { // remove surrounding whitespace val = val.trim() } obj[key] = val } else if (debug) { log(`did not match key and value when parsing line ${idx + 1}: ${line}`) } }) return obj } // Populates process.env from .env file function config (options /*: ?DotenvConfigOptions */) /*: DotenvConfigOutput */ { let dotenvPath = path.resolve(process.cwd(), '.env') let encoding /*: string */ = 'utf8' let debug = false if (options) { if (options.path != null) { dotenvPath = options.path } if (options.encoding != null) { encoding = options.encoding } if (options.debug != null) { debug = true } } try { // specifying an encoding returns a string instead of a buffer const parsed = parse(fs.readFileSync(dotenvPath, { encoding }), { debug }) Object.keys(parsed).forEach(function (key) { if (!Object.prototype.hasOwnProperty.call(process.env, key)) { process.env[key] = parsed[key] } else if (debug) { log(`"${key}" is already defined in \`process.env\` and will not be overwritten`) } }) return { parsed } } catch (e) { return { error: e } } } module.exports.config = config module.exports.parse = parse /***/ }), /***/ 1205: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var once = __nccwpck_require__(1223); var noop = function() {}; var isRequest = function(stream) { return stream.setHeader && typeof stream.abort === 'function'; }; var isChildProcess = function(stream) { return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 }; var eos = function(stream, opts, callback) { if (typeof opts === 'function') return eos(stream, null, opts); if (!opts) opts = {}; callback = once(callback || noop); var ws = stream._writableState; var rs = stream._readableState; var readable = opts.readable || (opts.readable !== false && stream.readable); var writable = opts.writable || (opts.writable !== false && stream.writable); var cancelled = false; var onlegacyfinish = function() { if (!stream.writable) onfinish(); }; var onfinish = function() { writable = false; if (!readable) callback.call(stream); }; var onend = function() { readable = false; if (!writable) callback.call(stream); }; var onexit = function(exitCode) { callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); }; var onerror = function(err) { callback.call(stream, err); }; var onclose = function() { process.nextTick(onclosenexttick); }; var onclosenexttick = function() { if (cancelled) return; if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); }; var onrequest = function() { stream.req.on('finish', onfinish); }; if (isRequest(stream)) { stream.on('complete', onfinish); stream.on('abort', onclose); if (stream.req) onrequest(); else stream.on('request', onrequest); } else if (writable && !ws) { // legacy streams stream.on('end', onlegacyfinish); stream.on('close', onlegacyfinish); } if (isChildProcess(stream)) stream.on('exit', onexit); stream.on('end', onend); stream.on('finish', onfinish); if (opts.error !== false) stream.on('error', onerror); stream.on('close', onclose); return function() { cancelled = true; stream.removeListener('complete', onfinish); stream.removeListener('abort', onclose); stream.removeListener('request', onrequest); if (stream.req) stream.req.removeListener('finish', onfinish); stream.removeListener('end', onlegacyfinish); stream.removeListener('close', onlegacyfinish); stream.removeListener('finish', onfinish); stream.removeListener('exit', onexit); stream.removeListener('end', onend); stream.removeListener('error', onerror); stream.removeListener('close', onclose); }; }; module.exports = eos; /***/ }), /***/ 3338: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const mkdirsSync = __nccwpck_require__(2915).mkdirsSync const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync const stat = __nccwpck_require__(3901) function copySync (src, dest, opts) { if (typeof opts === 'function') { opts = { filter: opts } } opts = opts || {} opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber // Warn about using preserveTimestamps on 32-bit node if (opts.preserveTimestamps && process.arch === 'ia32') { console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n see https://github.com/jprichardson/node-fs-extra/issues/269`) } const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy') stat.checkParentPathsSync(src, srcStat, dest, 'copy') return handleFilterAndCopy(destStat, src, dest, opts) } function handleFilterAndCopy (destStat, src, dest, opts) { if (opts.filter && !opts.filter(src, dest)) return const destParent = path.dirname(dest) if (!fs.existsSync(destParent)) mkdirsSync(destParent) return startCopy(destStat, src, dest, opts) } function startCopy (destStat, src, dest, opts) { if (opts.filter && !opts.filter(src, dest)) return return getStats(destStat, src, dest, opts) } function getStats (destStat, src, dest, opts) { const statSync = opts.dereference ? fs.statSync : fs.lstatSync const srcStat = statSync(src) if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) else if (srcStat.isFile() || srcStat.isCharacterDevice() || srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) } function onFile (srcStat, destStat, src, dest, opts) { if (!destStat) return copyFile(srcStat, src, dest, opts) return mayCopyFile(srcStat, src, dest, opts) } function mayCopyFile (srcStat, src, dest, opts) { if (opts.overwrite) { fs.unlinkSync(dest) return copyFile(srcStat, src, dest, opts) } else if (opts.errorOnExist) { throw new Error(`'${dest}' already exists`) } } function copyFile (srcStat, src, dest, opts) { fs.copyFileSync(src, dest) if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) return setDestMode(dest, srcStat.mode) } function handleTimestamps (srcMode, src, dest) { // Make sure the file is writable before setting the timestamp // otherwise open fails with EPERM when invoked with 'r+' // (through utimes call) if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) return setDestTimestamps(src, dest) } function fileIsNotWritable (srcMode) { return (srcMode & 0o200) === 0 } function makeFileWritable (dest, srcMode) { return setDestMode(dest, srcMode | 0o200) } function setDestMode (dest, srcMode) { return fs.chmodSync(dest, srcMode) } function setDestTimestamps (src, dest) { // The initial srcStat.atime cannot be trusted // because it is modified by the read(2) system call // (See https://nodejs.org/api/fs.html#fs_stat_time_values) const updatedSrcStat = fs.statSync(src) return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) } function onDir (srcStat, destStat, src, dest, opts) { if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) if (destStat && !destStat.isDirectory()) { throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) } return copyDir(src, dest, opts) } function mkDirAndCopy (srcMode, src, dest, opts) { fs.mkdirSync(dest) copyDir(src, dest, opts) return setDestMode(dest, srcMode) } function copyDir (src, dest, opts) { fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) } function copyDirItem (item, src, dest, opts) { const srcItem = path.join(src, item) const destItem = path.join(dest, item) const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy') return startCopy(destStat, srcItem, destItem, opts) } function onLink (destStat, src, dest, opts) { let resolvedSrc = fs.readlinkSync(src) if (opts.dereference) { resolvedSrc = path.resolve(process.cwd(), resolvedSrc) } if (!destStat) { return fs.symlinkSync(resolvedSrc, dest) } else { let resolvedDest try { resolvedDest = fs.readlinkSync(dest) } catch (err) { // dest exists and is a regular file or directory, // Windows may throw UNKNOWN error. If dest already exists, // fs throws error anyway, so no need to guard against it here. if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) throw err } if (opts.dereference) { resolvedDest = path.resolve(process.cwd(), resolvedDest) } if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) } // prevent copy if src is a subdir of dest since unlinking // dest in this case would result in removing src contents // and therefore a broken symlink would be created. if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) } return copyLink(resolvedSrc, dest) } } function copyLink (resolvedSrc, dest) { fs.unlinkSync(dest) return fs.symlinkSync(resolvedSrc, dest) } module.exports = copySync /***/ }), /***/ 1135: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = { copySync: __nccwpck_require__(3338) } /***/ }), /***/ 8834: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const mkdirs = __nccwpck_require__(2915).mkdirs const pathExists = __nccwpck_require__(3835).pathExists const utimesMillis = __nccwpck_require__(2548).utimesMillis const stat = __nccwpck_require__(3901) function copy (src, dest, opts, cb) { if (typeof opts === 'function' && !cb) { cb = opts opts = {} } else if (typeof opts === 'function') { opts = { filter: opts } } cb = cb || function () {} opts = opts || {} opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber // Warn about using preserveTimestamps on 32-bit node if (opts.preserveTimestamps && process.arch === 'ia32') { console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n see https://github.com/jprichardson/node-fs-extra/issues/269`) } stat.checkPaths(src, dest, 'copy', (err, stats) => { if (err) return cb(err) const { srcStat, destStat } = stats stat.checkParentPaths(src, srcStat, dest, 'copy', err => { if (err) return cb(err) if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) return checkParentDir(destStat, src, dest, opts, cb) }) }) } function checkParentDir (destStat, src, dest, opts, cb) { const destParent = path.dirname(dest) pathExists(destParent, (err, dirExists) => { if (err) return cb(err) if (dirExists) return startCopy(destStat, src, dest, opts, cb) mkdirs(destParent, err => { if (err) return cb(err) return startCopy(destStat, src, dest, opts, cb) }) }) } function handleFilter (onInclude, destStat, src, dest, opts, cb) { Promise.resolve(opts.filter(src, dest)).then(include => { if (include) return onInclude(destStat, src, dest, opts, cb) return cb() }, error => cb(error)) } function startCopy (destStat, src, dest, opts, cb) { if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) return getStats(destStat, src, dest, opts, cb) } function getStats (destStat, src, dest, opts, cb) { const stat = opts.dereference ? fs.stat : fs.lstat stat(src, (err, srcStat) => { if (err) return cb(err) if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) else if (srcStat.isFile() || srcStat.isCharacterDevice() || srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) }) } function onFile (srcStat, destStat, src, dest, opts, cb) { if (!destStat) return copyFile(srcStat, src, dest, opts, cb) return mayCopyFile(srcStat, src, dest, opts, cb) } function mayCopyFile (srcStat, src, dest, opts, cb) { if (opts.overwrite) { fs.unlink(dest, err => { if (err) return cb(err) return copyFile(srcStat, src, dest, opts, cb) }) } else if (opts.errorOnExist) { return cb(new Error(`'${dest}' already exists`)) } else return cb() } function copyFile (srcStat, src, dest, opts, cb) { fs.copyFile(src, dest, err => { if (err) return cb(err) if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) return setDestMode(dest, srcStat.mode, cb) }) } function handleTimestampsAndMode (srcMode, src, dest, cb) { // Make sure the file is writable before setting the timestamp // otherwise open fails with EPERM when invoked with 'r+' // (through utimes call) if (fileIsNotWritable(srcMode)) { return makeFileWritable(dest, srcMode, err => { if (err) return cb(err) return setDestTimestampsAndMode(srcMode, src, dest, cb) }) } return setDestTimestampsAndMode(srcMode, src, dest, cb) } function fileIsNotWritable (srcMode) { return (srcMode & 0o200) === 0 } function makeFileWritable (dest, srcMode, cb) { return setDestMode(dest, srcMode | 0o200, cb) } function setDestTimestampsAndMode (srcMode, src, dest, cb) { setDestTimestamps(src, dest, err => { if (err) return cb(err) return setDestMode(dest, srcMode, cb) }) } function setDestMode (dest, srcMode, cb) { return fs.chmod(dest, srcMode, cb) } function setDestTimestamps (src, dest, cb) { // The initial srcStat.atime cannot be trusted // because it is modified by the read(2) system call // (See https://nodejs.org/api/fs.html#fs_stat_time_values) fs.stat(src, (err, updatedSrcStat) => { if (err) return cb(err) return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) }) } function onDir (srcStat, destStat, src, dest, opts, cb) { if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) if (destStat && !destStat.isDirectory()) { return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) } return copyDir(src, dest, opts, cb) } function mkDirAndCopy (srcMode, src, dest, opts, cb) { fs.mkdir(dest, err => { if (err) return cb(err) copyDir(src, dest, opts, err => { if (err) return cb(err) return setDestMode(dest, srcMode, cb) }) }) } function copyDir (src, dest, opts, cb) { fs.readdir(src, (err, items) => { if (err) return cb(err) return copyDirItems(items, src, dest, opts, cb) }) } function copyDirItems (items, src, dest, opts, cb) { const item = items.pop() if (!item) return cb() return copyDirItem(items, item, src, dest, opts, cb) } function copyDirItem (items, item, src, dest, opts, cb) { const srcItem = path.join(src, item) const destItem = path.join(dest, item) stat.checkPaths(srcItem, destItem, 'copy', (err, stats) => { if (err) return cb(err) const { destStat } = stats startCopy(destStat, srcItem, destItem, opts, err => { if (err) return cb(err) return copyDirItems(items, src, dest, opts, cb) }) }) } function onLink (destStat, src, dest, opts, cb) { fs.readlink(src, (err, resolvedSrc) => { if (err) return cb(err) if (opts.dereference) { resolvedSrc = path.resolve(process.cwd(), resolvedSrc) } if (!destStat) { return fs.symlink(resolvedSrc, dest, cb) } else { fs.readlink(dest, (err, resolvedDest) => { if (err) { // dest exists and is a regular file or directory, // Windows may throw UNKNOWN error. If dest already exists, // fs throws error anyway, so no need to guard against it here. if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) return cb(err) } if (opts.dereference) { resolvedDest = path.resolve(process.cwd(), resolvedDest) } if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) } // do not copy if src is a subdir of dest since unlinking // dest in this case would result in removing src contents // and therefore a broken symlink would be created. if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) } return copyLink(resolvedSrc, dest, cb) }) } }) } function copyLink (resolvedSrc, dest, cb) { fs.unlink(dest, err => { if (err) return cb(err) return fs.symlink(resolvedSrc, dest, cb) }) } module.exports = copy /***/ }), /***/ 1335: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback module.exports = { copy: u(__nccwpck_require__(8834)) } /***/ }), /***/ 6970: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const mkdir = __nccwpck_require__(2915) const remove = __nccwpck_require__(7357) const emptyDir = u(function emptyDir (dir, callback) { callback = callback || function () {} fs.readdir(dir, (err, items) => { if (err) return mkdir.mkdirs(dir, callback) items = items.map(item => path.join(dir, item)) deleteItem() function deleteItem () { const item = items.pop() if (!item) return callback() remove.remove(item, err => { if (err) return callback(err) deleteItem() }) } }) }) function emptyDirSync (dir) { let items try { items = fs.readdirSync(dir) } catch { return mkdir.mkdirsSync(dir) } items.forEach(item => { item = path.join(dir, item) remove.removeSync(item) }) } module.exports = { emptyDirSync, emptydirSync: emptyDirSync, emptyDir, emptydir: emptyDir } /***/ }), /***/ 2164: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback const path = __nccwpck_require__(5622) const fs = __nccwpck_require__(7758) const mkdir = __nccwpck_require__(2915) function createFile (file, callback) { function makeFile () { fs.writeFile(file, '', err => { if (err) return callback(err) callback() }) } fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err if (!err && stats.isFile()) return callback() const dir = path.dirname(file) fs.stat(dir, (err, stats) => { if (err) { // if the directory doesn't exist, make it if (err.code === 'ENOENT') { return mkdir.mkdirs(dir, err => { if (err) return callback(err) makeFile() }) } return callback(err) } if (stats.isDirectory()) makeFile() else { // parent is not a directory // This is just to cause an internal ENOTDIR error to be thrown fs.readdir(dir, err => { if (err) return callback(err) }) } }) }) } function createFileSync (file) { let stats try { stats = fs.statSync(file) } catch {} if (stats && stats.isFile()) return const dir = path.dirname(file) try { if (!fs.statSync(dir).isDirectory()) { // parent is not a directory // This is just to cause an internal ENOTDIR error to be thrown fs.readdirSync(dir) } } catch (err) { // If the stat call above failed because the directory doesn't exist, create it if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) else throw err } fs.writeFileSync(file, '') } module.exports = { createFile: u(createFile), createFileSync } /***/ }), /***/ 55: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const file = __nccwpck_require__(2164) const link = __nccwpck_require__(3797) const symlink = __nccwpck_require__(2549) module.exports = { // file createFile: file.createFile, createFileSync: file.createFileSync, ensureFile: file.createFile, ensureFileSync: file.createFileSync, // link createLink: link.createLink, createLinkSync: link.createLinkSync, ensureLink: link.createLink, ensureLinkSync: link.createLinkSync, // symlink createSymlink: symlink.createSymlink, createSymlinkSync: symlink.createSymlinkSync, ensureSymlink: symlink.createSymlink, ensureSymlinkSync: symlink.createSymlinkSync } /***/ }), /***/ 3797: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback const path = __nccwpck_require__(5622) const fs = __nccwpck_require__(7758) const mkdir = __nccwpck_require__(2915) const pathExists = __nccwpck_require__(3835).pathExists function createLink (srcpath, dstpath, callback) { function makeLink (srcpath, dstpath) { fs.link(srcpath, dstpath, err => { if (err) return callback(err) callback(null) }) } pathExists(dstpath, (err, destinationExists) => { if (err) return callback(err) if (destinationExists) return callback(null) fs.lstat(srcpath, (err) => { if (err) { err.message = err.message.replace('lstat', 'ensureLink') return callback(err) } const dir = path.dirname(dstpath) pathExists(dir, (err, dirExists) => { if (err) return callback(err) if (dirExists) return makeLink(srcpath, dstpath) mkdir.mkdirs(dir, err => { if (err) return callback(err) makeLink(srcpath, dstpath) }) }) }) }) } function createLinkSync (srcpath, dstpath) { const destinationExists = fs.existsSync(dstpath) if (destinationExists) return undefined try { fs.lstatSync(srcpath) } catch (err) { err.message = err.message.replace('lstat', 'ensureLink') throw err } const dir = path.dirname(dstpath) const dirExists = fs.existsSync(dir) if (dirExists) return fs.linkSync(srcpath, dstpath) mkdir.mkdirsSync(dir) return fs.linkSync(srcpath, dstpath) } module.exports = { createLink: u(createLink), createLinkSync } /***/ }), /***/ 3727: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(5622) const fs = __nccwpck_require__(7758) const pathExists = __nccwpck_require__(3835).pathExists /** * Function that returns two types of paths, one relative to symlink, and one * relative to the current working directory. Checks if path is absolute or * relative. If the path is relative, this function checks if the path is * relative to symlink or relative to current working directory. This is an * initiative to find a smarter `srcpath` to supply when building symlinks. * This allows you to determine which path to use out of one of three possible * types of source paths. The first is an absolute path. This is detected by * `path.isAbsolute()`. When an absolute path is provided, it is checked to * see if it exists. If it does it's used, if not an error is returned * (callback)/ thrown (sync). The other two options for `srcpath` are a * relative url. By default Node's `fs.symlink` works by creating a symlink * using `dstpath` and expects the `srcpath` to be relative to the newly * created symlink. If you provide a `srcpath` that does not exist on the file * system it results in a broken symlink. To minimize this, the function * checks to see if the 'relative to symlink' source file exists, and if it * does it will use it. If it does not, it checks if there's a file that * exists that is relative to the current working directory, if does its used. * This preserves the expectations of the original fs.symlink spec and adds * the ability to pass in `relative to current working direcotry` paths. */ function symlinkPaths (srcpath, dstpath, callback) { if (path.isAbsolute(srcpath)) { return fs.lstat(srcpath, (err) => { if (err) { err.message = err.message.replace('lstat', 'ensureSymlink') return callback(err) } return callback(null, { toCwd: srcpath, toDst: srcpath }) }) } else { const dstdir = path.dirname(dstpath) const relativeToDst = path.join(dstdir, srcpath) return pathExists(relativeToDst, (err, exists) => { if (err) return callback(err) if (exists) { return callback(null, { toCwd: relativeToDst, toDst: srcpath }) } else { return fs.lstat(srcpath, (err) => { if (err) { err.message = err.message.replace('lstat', 'ensureSymlink') return callback(err) } return callback(null, { toCwd: srcpath, toDst: path.relative(dstdir, srcpath) }) }) } }) } } function symlinkPathsSync (srcpath, dstpath) { let exists if (path.isAbsolute(srcpath)) { exists = fs.existsSync(srcpath) if (!exists) throw new Error('absolute srcpath does not exist') return { toCwd: srcpath, toDst: srcpath } } else { const dstdir = path.dirname(dstpath) const relativeToDst = path.join(dstdir, srcpath) exists = fs.existsSync(relativeToDst) if (exists) { return { toCwd: relativeToDst, toDst: srcpath } } else { exists = fs.existsSync(srcpath) if (!exists) throw new Error('relative srcpath does not exist') return { toCwd: srcpath, toDst: path.relative(dstdir, srcpath) } } } } module.exports = { symlinkPaths, symlinkPathsSync } /***/ }), /***/ 8254: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) function symlinkType (srcpath, type, callback) { callback = (typeof type === 'function') ? type : callback type = (typeof type === 'function') ? false : type if (type) return callback(null, type) fs.lstat(srcpath, (err, stats) => { if (err) return callback(null, 'file') type = (stats && stats.isDirectory()) ? 'dir' : 'file' callback(null, type) }) } function symlinkTypeSync (srcpath, type) { let stats if (type) return type try { stats = fs.lstatSync(srcpath) } catch { return 'file' } return (stats && stats.isDirectory()) ? 'dir' : 'file' } module.exports = { symlinkType, symlinkTypeSync } /***/ }), /***/ 2549: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback const path = __nccwpck_require__(5622) const fs = __nccwpck_require__(7758) const _mkdirs = __nccwpck_require__(2915) const mkdirs = _mkdirs.mkdirs const mkdirsSync = _mkdirs.mkdirsSync const _symlinkPaths = __nccwpck_require__(3727) const symlinkPaths = _symlinkPaths.symlinkPaths const symlinkPathsSync = _symlinkPaths.symlinkPathsSync const _symlinkType = __nccwpck_require__(8254) const symlinkType = _symlinkType.symlinkType const symlinkTypeSync = _symlinkType.symlinkTypeSync const pathExists = __nccwpck_require__(3835).pathExists function createSymlink (srcpath, dstpath, type, callback) { callback = (typeof type === 'function') ? type : callback type = (typeof type === 'function') ? false : type pathExists(dstpath, (err, destinationExists) => { if (err) return callback(err) if (destinationExists) return callback(null) symlinkPaths(srcpath, dstpath, (err, relative) => { if (err) return callback(err) srcpath = relative.toDst symlinkType(relative.toCwd, type, (err, type) => { if (err) return callback(err) const dir = path.dirname(dstpath) pathExists(dir, (err, dirExists) => { if (err) return callback(err) if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) mkdirs(dir, err => { if (err) return callback(err) fs.symlink(srcpath, dstpath, type, callback) }) }) }) }) }) } function createSymlinkSync (srcpath, dstpath, type) { const destinationExists = fs.existsSync(dstpath) if (destinationExists) return undefined const relative = symlinkPathsSync(srcpath, dstpath) srcpath = relative.toDst type = symlinkTypeSync(relative.toCwd, type) const dir = path.dirname(dstpath) const exists = fs.existsSync(dir) if (exists) return fs.symlinkSync(srcpath, dstpath, type) mkdirsSync(dir) return fs.symlinkSync(srcpath, dstpath, type) } module.exports = { createSymlink: u(createSymlink), createSymlinkSync } /***/ }), /***/ 1176: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // This is adapted from https://github.com/normalize/mz // Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors const u = __nccwpck_require__(1463).fromCallback const fs = __nccwpck_require__(7758) const api = [ 'access', 'appendFile', 'chmod', 'chown', 'close', 'copyFile', 'fchmod', 'fchown', 'fdatasync', 'fstat', 'fsync', 'ftruncate', 'futimes', 'lchmod', 'lchown', 'link', 'lstat', 'mkdir', 'mkdtemp', 'open', 'opendir', 'readdir', 'readFile', 'readlink', 'realpath', 'rename', 'rm', 'rmdir', 'stat', 'symlink', 'truncate', 'unlink', 'utimes', 'writeFile' ].filter(key => { // Some commands are not available on some systems. Ex: // fs.opendir was added in Node.js v12.12.0 // fs.rm was added in Node.js v14.14.0 // fs.lchown is not available on at least some Linux return typeof fs[key] === 'function' }) // Export all keys: Object.keys(fs).forEach(key => { if (key === 'promises') { // fs.promises is a getter property that triggers ExperimentalWarning // Don't re-export it here, the getter is defined in "lib/index.js" return } exports[key] = fs[key] }) // Universalify async methods: api.forEach(method => { exports[method] = u(fs[method]) }) // We differ from mz/fs in that we still ship the old, broken, fs.exists() // since we are a drop-in replacement for the native module exports.exists = function (filename, callback) { if (typeof callback === 'function') { return fs.exists(filename, callback) } return new Promise(resolve => { return fs.exists(filename, resolve) }) } // fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args exports.read = function (fd, buffer, offset, length, position, callback) { if (typeof callback === 'function') { return fs.read(fd, buffer, offset, length, position, callback) } return new Promise((resolve, reject) => { fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { if (err) return reject(err) resolve({ bytesRead, buffer }) }) }) } // Function signature can be // fs.write(fd, buffer[, offset[, length[, position]]], callback) // OR // fs.write(fd, string[, position[, encoding]], callback) // We need to handle both cases, so we use ...args exports.write = function (fd, buffer, ...args) { if (typeof args[args.length - 1] === 'function') { return fs.write(fd, buffer, ...args) } return new Promise((resolve, reject) => { fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { if (err) return reject(err) resolve({ bytesWritten, buffer }) }) }) } // fs.writev only available in Node v12.9.0+ if (typeof fs.writev === 'function') { // Function signature is // s.writev(fd, buffers[, position], callback) // We need to handle the optional arg, so we use ...args exports.writev = function (fd, buffers, ...args) { if (typeof args[args.length - 1] === 'function') { return fs.writev(fd, buffers, ...args) } return new Promise((resolve, reject) => { fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { if (err) return reject(err) resolve({ bytesWritten, buffers }) }) }) } } // fs.realpath.native only available in Node v9.2+ if (typeof fs.realpath.native === 'function') { exports.realpath.native = u(fs.realpath.native) } /***/ }), /***/ 5630: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = { // Export promiseified graceful-fs: ...__nccwpck_require__(1176), // Export extra methods: ...__nccwpck_require__(1135), ...__nccwpck_require__(1335), ...__nccwpck_require__(6970), ...__nccwpck_require__(55), ...__nccwpck_require__(213), ...__nccwpck_require__(2915), ...__nccwpck_require__(9665), ...__nccwpck_require__(1497), ...__nccwpck_require__(6570), ...__nccwpck_require__(3835), ...__nccwpck_require__(7357) } // Export fs.promises as a getter property so that we don't trigger // ExperimentalWarning before fs.promises is actually accessed. const fs = __nccwpck_require__(5747) if (Object.getOwnPropertyDescriptor(fs, 'promises')) { Object.defineProperty(module.exports, "promises", ({ get () { return fs.promises } })) } /***/ }), /***/ 213: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromPromise const jsonFile = __nccwpck_require__(8970) jsonFile.outputJson = u(__nccwpck_require__(531)) jsonFile.outputJsonSync = __nccwpck_require__(9421) // aliases jsonFile.outputJSON = jsonFile.outputJson jsonFile.outputJSONSync = jsonFile.outputJsonSync jsonFile.writeJSON = jsonFile.writeJson jsonFile.writeJSONSync = jsonFile.writeJsonSync jsonFile.readJSON = jsonFile.readJson jsonFile.readJSONSync = jsonFile.readJsonSync module.exports = jsonFile /***/ }), /***/ 8970: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const jsonFile = __nccwpck_require__(6160) module.exports = { // jsonfile exports readJson: jsonFile.readFile, readJsonSync: jsonFile.readFileSync, writeJson: jsonFile.writeFile, writeJsonSync: jsonFile.writeFileSync } /***/ }), /***/ 9421: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { stringify } = __nccwpck_require__(5902) const { outputFileSync } = __nccwpck_require__(6570) function outputJsonSync (file, data, options) { const str = stringify(data, options) outputFileSync(file, str, options) } module.exports = outputJsonSync /***/ }), /***/ 531: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { stringify } = __nccwpck_require__(5902) const { outputFile } = __nccwpck_require__(6570) async function outputJson (file, data, options = {}) { const str = stringify(data, options) await outputFile(file, str, options) } module.exports = outputJson /***/ }), /***/ 2915: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromPromise const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751) const makeDir = u(_makeDir) module.exports = { mkdirs: makeDir, mkdirsSync: makeDirSync, // alias mkdirp: makeDir, mkdirpSync: makeDirSync, ensureDir: makeDir, ensureDirSync: makeDirSync } /***/ }), /***/ 2751: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Adapted from https://github.com/sindresorhus/make-dir // Copyright (c) Sindre Sorhus (sindresorhus.com) // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. const fs = __nccwpck_require__(1176) const path = __nccwpck_require__(5622) const atLeastNode = __nccwpck_require__(5995) const useNativeRecursiveOption = atLeastNode('10.12.0') // https://github.com/nodejs/node/issues/8987 // https://github.com/libuv/libuv/pull/1088 const checkPath = pth => { if (process.platform === 'win32') { const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) if (pathHasInvalidWinCharacters) { const error = new Error(`Path contains invalid characters: ${pth}`) error.code = 'EINVAL' throw error } } } const processOptions = options => { const defaults = { mode: 0o777 } if (typeof options === 'number') options = { mode: options } return { ...defaults, ...options } } const permissionError = pth => { // This replicates the exception of `fs.mkdir` with native the // `recusive` option when run on an invalid drive under Windows. const error = new Error(`operation not permitted, mkdir '${pth}'`) error.code = 'EPERM' error.errno = -4048 error.path = pth error.syscall = 'mkdir' return error } module.exports.makeDir = async (input, options) => { checkPath(input) options = processOptions(options) if (useNativeRecursiveOption) { const pth = path.resolve(input) return fs.mkdir(pth, { mode: options.mode, recursive: true }) } const make = async pth => { try { await fs.mkdir(pth, options.mode) } catch (error) { if (error.code === 'EPERM') { throw error } if (error.code === 'ENOENT') { if (path.dirname(pth) === pth) { throw permissionError(pth) } if (error.message.includes('null bytes')) { throw error } await make(path.dirname(pth)) return make(pth) } try { const stats = await fs.stat(pth) if (!stats.isDirectory()) { // This error is never exposed to the user // it is caught below, and the original error is thrown throw new Error('The path is not a directory') } } catch { throw error } } } return make(path.resolve(input)) } module.exports.makeDirSync = (input, options) => { checkPath(input) options = processOptions(options) if (useNativeRecursiveOption) { const pth = path.resolve(input) return fs.mkdirSync(pth, { mode: options.mode, recursive: true }) } const make = pth => { try { fs.mkdirSync(pth, options.mode) } catch (error) { if (error.code === 'EPERM') { throw error } if (error.code === 'ENOENT') { if (path.dirname(pth) === pth) { throw permissionError(pth) } if (error.message.includes('null bytes')) { throw error } make(path.dirname(pth)) return make(pth) } try { if (!fs.statSync(pth).isDirectory()) { // This error is never exposed to the user // it is caught below, and the original error is thrown throw new Error('The path is not a directory') } } catch { throw error } } } return make(path.resolve(input)) } /***/ }), /***/ 9665: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = { moveSync: __nccwpck_require__(6445) } /***/ }), /***/ 6445: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const copySync = __nccwpck_require__(1135).copySync const removeSync = __nccwpck_require__(7357).removeSync const mkdirpSync = __nccwpck_require__(2915).mkdirpSync const stat = __nccwpck_require__(3901) function moveSync (src, dest, opts) { opts = opts || {} const overwrite = opts.overwrite || opts.clobber || false const { srcStat } = stat.checkPathsSync(src, dest, 'move') stat.checkParentPathsSync(src, srcStat, dest, 'move') mkdirpSync(path.dirname(dest)) return doRename(src, dest, overwrite) } function doRename (src, dest, overwrite) { if (overwrite) { removeSync(dest) return rename(src, dest, overwrite) } if (fs.existsSync(dest)) throw new Error('dest already exists.') return rename(src, dest, overwrite) } function rename (src, dest, overwrite) { try { fs.renameSync(src, dest) } catch (err) { if (err.code !== 'EXDEV') throw err return moveAcrossDevice(src, dest, overwrite) } } function moveAcrossDevice (src, dest, overwrite) { const opts = { overwrite, errorOnExist: true } copySync(src, dest, opts) return removeSync(src) } module.exports = moveSync /***/ }), /***/ 1497: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback module.exports = { move: u(__nccwpck_require__(2231)) } /***/ }), /***/ 2231: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const copy = __nccwpck_require__(1335).copy const remove = __nccwpck_require__(7357).remove const mkdirp = __nccwpck_require__(2915).mkdirp const pathExists = __nccwpck_require__(3835).pathExists const stat = __nccwpck_require__(3901) function move (src, dest, opts, cb) { if (typeof opts === 'function') { cb = opts opts = {} } const overwrite = opts.overwrite || opts.clobber || false stat.checkPaths(src, dest, 'move', (err, stats) => { if (err) return cb(err) const { srcStat } = stats stat.checkParentPaths(src, srcStat, dest, 'move', err => { if (err) return cb(err) mkdirp(path.dirname(dest), err => { if (err) return cb(err) return doRename(src, dest, overwrite, cb) }) }) }) } function doRename (src, dest, overwrite, cb) { if (overwrite) { return remove(dest, err => { if (err) return cb(err) return rename(src, dest, overwrite, cb) }) } pathExists(dest, (err, destExists) => { if (err) return cb(err) if (destExists) return cb(new Error('dest already exists.')) return rename(src, dest, overwrite, cb) }) } function rename (src, dest, overwrite, cb) { fs.rename(src, dest, err => { if (!err) return cb() if (err.code !== 'EXDEV') return cb(err) return moveAcrossDevice(src, dest, overwrite, cb) }) } function moveAcrossDevice (src, dest, overwrite, cb) { const opts = { overwrite, errorOnExist: true } copy(src, dest, opts, err => { if (err) return cb(err) return remove(src, cb) }) } module.exports = move /***/ }), /***/ 6570: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const mkdir = __nccwpck_require__(2915) const pathExists = __nccwpck_require__(3835).pathExists function outputFile (file, data, encoding, callback) { if (typeof encoding === 'function') { callback = encoding encoding = 'utf8' } const dir = path.dirname(file) pathExists(dir, (err, itDoes) => { if (err) return callback(err) if (itDoes) return fs.writeFile(file, data, encoding, callback) mkdir.mkdirs(dir, err => { if (err) return callback(err) fs.writeFile(file, data, encoding, callback) }) }) } function outputFileSync (file, ...args) { const dir = path.dirname(file) if (fs.existsSync(dir)) { return fs.writeFileSync(file, ...args) } mkdir.mkdirsSync(dir) fs.writeFileSync(file, ...args) } module.exports = { outputFile: u(outputFile), outputFileSync } /***/ }), /***/ 3835: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromPromise const fs = __nccwpck_require__(1176) function pathExists (path) { return fs.access(path).then(() => true).catch(() => false) } module.exports = { pathExists: u(pathExists), pathExistsSync: fs.existsSync } /***/ }), /***/ 7357: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const u = __nccwpck_require__(1463).fromCallback const rimraf = __nccwpck_require__(7247) module.exports = { remove: u(rimraf), removeSync: rimraf.sync } /***/ }), /***/ 7247: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) const path = __nccwpck_require__(5622) const assert = __nccwpck_require__(2357) const isWindows = (process.platform === 'win32') function defaults (options) { const methods = [ 'unlink', 'chmod', 'stat', 'lstat', 'rmdir', 'readdir' ] methods.forEach(m => { options[m] = options[m] || fs[m] m = m + 'Sync' options[m] = options[m] || fs[m] }) options.maxBusyTries = options.maxBusyTries || 3 } function rimraf (p, options, cb) { let busyTries = 0 if (typeof options === 'function') { cb = options options = {} } assert(p, 'rimraf: missing path') assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') assert(options, 'rimraf: invalid options argument provided') assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') defaults(options) rimraf_(p, options, function CB (er) { if (er) { if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && busyTries < options.maxBusyTries) { busyTries++ const time = busyTries * 100 // try again, with the same exact callback as this one. return setTimeout(() => rimraf_(p, options, CB), time) } // already gone if (er.code === 'ENOENT') er = null } cb(er) }) } // Two possible strategies. // 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR // 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR // // Both result in an extra syscall when you guess wrong. However, there // are likely far more normal files in the world than directories. This // is based on the assumption that a the average number of files per // directory is >= 1. // // If anyone ever complains about this, then I guess the strategy could // be made configurable somehow. But until then, YAGNI. function rimraf_ (p, options, cb) { assert(p) assert(options) assert(typeof cb === 'function') // sunos lets the root user unlink directories, which is... weird. // so we have to lstat here and make sure it's not a dir. options.lstat(p, (er, st) => { if (er && er.code === 'ENOENT') { return cb(null) } // Windows can EPERM on stat. Life is suffering. if (er && er.code === 'EPERM' && isWindows) { return fixWinEPERM(p, options, er, cb) } if (st && st.isDirectory()) { return rmdir(p, options, er, cb) } options.unlink(p, er => { if (er) { if (er.code === 'ENOENT') { return cb(null) } if (er.code === 'EPERM') { return (isWindows) ? fixWinEPERM(p, options, er, cb) : rmdir(p, options, er, cb) } if (er.code === 'EISDIR') { return rmdir(p, options, er, cb) } } return cb(er) }) }) } function fixWinEPERM (p, options, er, cb) { assert(p) assert(options) assert(typeof cb === 'function') options.chmod(p, 0o666, er2 => { if (er2) { cb(er2.code === 'ENOENT' ? null : er) } else { options.stat(p, (er3, stats) => { if (er3) { cb(er3.code === 'ENOENT' ? null : er) } else if (stats.isDirectory()) { rmdir(p, options, er, cb) } else { options.unlink(p, cb) } }) } }) } function fixWinEPERMSync (p, options, er) { let stats assert(p) assert(options) try { options.chmodSync(p, 0o666) } catch (er2) { if (er2.code === 'ENOENT') { return } else { throw er } } try { stats = options.statSync(p) } catch (er3) { if (er3.code === 'ENOENT') { return } else { throw er } } if (stats.isDirectory()) { rmdirSync(p, options, er) } else { options.unlinkSync(p) } } function rmdir (p, options, originalEr, cb) { assert(p) assert(options) assert(typeof cb === 'function') // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) // if we guessed wrong, and it's not a directory, then // raise the original error. options.rmdir(p, er => { if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { rmkids(p, options, cb) } else if (er && er.code === 'ENOTDIR') { cb(originalEr) } else { cb(er) } }) } function rmkids (p, options, cb) { assert(p) assert(options) assert(typeof cb === 'function') options.readdir(p, (er, files) => { if (er) return cb(er) let n = files.length let errState if (n === 0) return options.rmdir(p, cb) files.forEach(f => { rimraf(path.join(p, f), options, er => { if (errState) { return } if (er) return cb(errState = er) if (--n === 0) { options.rmdir(p, cb) } }) }) }) } // this looks simpler, and is strictly *faster*, but will // tie up the JavaScript thread and fail on excessively // deep directory trees. function rimrafSync (p, options) { let st options = options || {} defaults(options) assert(p, 'rimraf: missing path') assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') assert(options, 'rimraf: missing options') assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') try { st = options.lstatSync(p) } catch (er) { if (er.code === 'ENOENT') { return } // Windows can EPERM on stat. Life is suffering. if (er.code === 'EPERM' && isWindows) { fixWinEPERMSync(p, options, er) } } try { // sunos lets the root user unlink directories, which is... weird. if (st && st.isDirectory()) { rmdirSync(p, options, null) } else { options.unlinkSync(p) } } catch (er) { if (er.code === 'ENOENT') { return } else if (er.code === 'EPERM') { return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) } else if (er.code !== 'EISDIR') { throw er } rmdirSync(p, options, er) } } function rmdirSync (p, options, originalEr) { assert(p) assert(options) try { options.rmdirSync(p) } catch (er) { if (er.code === 'ENOTDIR') { throw originalEr } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { rmkidsSync(p, options) } else if (er.code !== 'ENOENT') { throw er } } } function rmkidsSync (p, options) { assert(p) assert(options) options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) if (isWindows) { // We only end up here once we got ENOTEMPTY at least once, and // at this point, we are guaranteed to have removed all the kids. // So, we know that it won't be ENOENT or ENOTDIR or anything else. // try really hard to delete stuff on windows, because it has a // PROFOUNDLY annoying habit of not closing handles promptly when // files are deleted, resulting in spurious ENOTEMPTY errors. const startTime = Date.now() do { try { const ret = options.rmdirSync(p, options) return ret } catch {} } while (Date.now() - startTime < 500) // give up after 500ms } else { const ret = options.rmdirSync(p, options) return ret } } module.exports = rimraf rimraf.sync = rimrafSync /***/ }), /***/ 3901: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(1176) const path = __nccwpck_require__(5622) const util = __nccwpck_require__(1669) const atLeastNode = __nccwpck_require__(5995) const nodeSupportsBigInt = atLeastNode('10.5.0') const stat = (file) => nodeSupportsBigInt ? fs.stat(file, { bigint: true }) : fs.stat(file) const statSync = (file) => nodeSupportsBigInt ? fs.statSync(file, { bigint: true }) : fs.statSync(file) function getStats (src, dest) { return Promise.all([ stat(src), stat(dest).catch(err => { if (err.code === 'ENOENT') return null throw err }) ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) } function getStatsSync (src, dest) { let destStat const srcStat = statSync(src) try { destStat = statSync(dest) } catch (err) { if (err.code === 'ENOENT') return { srcStat, destStat: null } throw err } return { srcStat, destStat } } function checkPaths (src, dest, funcName, cb) { util.callbackify(getStats)(src, dest, (err, stats) => { if (err) return cb(err) const { srcStat, destStat } = stats if (destStat && areIdentical(srcStat, destStat)) { return cb(new Error('Source and destination must not be the same.')) } if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { return cb(new Error(errMsg(src, dest, funcName))) } return cb(null, { srcStat, destStat }) }) } function checkPathsSync (src, dest, funcName) { const { srcStat, destStat } = getStatsSync(src, dest) if (destStat && areIdentical(srcStat, destStat)) { throw new Error('Source and destination must not be the same.') } if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { throw new Error(errMsg(src, dest, funcName)) } return { srcStat, destStat } } // recursively check if dest parent is a subdirectory of src. // It works for all file types including symlinks since it // checks the src and dest inodes. It starts from the deepest // parent and stops once it reaches the src parent or the root path. function checkParentPaths (src, srcStat, dest, funcName, cb) { const srcParent = path.resolve(path.dirname(src)) const destParent = path.resolve(path.dirname(dest)) if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() const callback = (err, destStat) => { if (err) { if (err.code === 'ENOENT') return cb() return cb(err) } if (areIdentical(srcStat, destStat)) { return cb(new Error(errMsg(src, dest, funcName))) } return checkParentPaths(src, srcStat, destParent, funcName, cb) } if (nodeSupportsBigInt) fs.stat(destParent, { bigint: true }, callback) else fs.stat(destParent, callback) } function checkParentPathsSync (src, srcStat, dest, funcName) { const srcParent = path.resolve(path.dirname(src)) const destParent = path.resolve(path.dirname(dest)) if (destParent === srcParent || destParent === path.parse(destParent).root) return let destStat try { destStat = statSync(destParent) } catch (err) { if (err.code === 'ENOENT') return throw err } if (areIdentical(srcStat, destStat)) { throw new Error(errMsg(src, dest, funcName)) } return checkParentPathsSync(src, srcStat, destParent, funcName) } function areIdentical (srcStat, destStat) { if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { if (nodeSupportsBigInt || destStat.ino < Number.MAX_SAFE_INTEGER) { // definitive answer return true } // Use additional heuristics if we can't use 'bigint'. // Different 'ino' could be represented the same if they are >= Number.MAX_SAFE_INTEGER // See issue 657 if (destStat.size === srcStat.size && destStat.mode === srcStat.mode && destStat.nlink === srcStat.nlink && destStat.atimeMs === srcStat.atimeMs && destStat.mtimeMs === srcStat.mtimeMs && destStat.ctimeMs === srcStat.ctimeMs && destStat.birthtimeMs === srcStat.birthtimeMs) { // heuristic answer return true } } return false } // return true if dest is a subdir of src, otherwise false. // It only checks the path strings. function isSrcSubdir (src, dest) { const srcArr = path.resolve(src).split(path.sep).filter(i => i) const destArr = path.resolve(dest).split(path.sep).filter(i => i) return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) } function errMsg (src, dest, funcName) { return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` } module.exports = { checkPaths, checkPathsSync, checkParentPaths, checkParentPathsSync, isSrcSubdir } /***/ }), /***/ 2548: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(7758) function utimesMillis (path, atime, mtime, callback) { // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) fs.open(path, 'r+', (err, fd) => { if (err) return callback(err) fs.futimes(fd, atime, mtime, futimesErr => { fs.close(fd, closeErr => { if (callback) callback(futimesErr || closeErr) }) }) }) } function utimesMillisSync (path, atime, mtime) { const fd = fs.openSync(path, 'r+') fs.futimesSync(fd, atime, mtime) return fs.closeSync(fd) } module.exports = { utimesMillis, utimesMillisSync } /***/ }), /***/ 7356: /***/ ((module) => { "use strict"; module.exports = clone var getPrototypeOf = Object.getPrototypeOf || function (obj) { return obj.__proto__ } function clone (obj) { if (obj === null || typeof obj !== 'object') return obj if (obj instanceof Object) var copy = { __proto__: getPrototypeOf(obj) } else var copy = Object.create(null) Object.getOwnPropertyNames(obj).forEach(function (key) { Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) }) return copy } /***/ }), /***/ 7758: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var fs = __nccwpck_require__(5747) var polyfills = __nccwpck_require__(263) var legacy = __nccwpck_require__(5162) var clone = __nccwpck_require__(7356) var util = __nccwpck_require__(1669) /* istanbul ignore next - node 0.x polyfill */ var gracefulQueue var previousSymbol /* istanbul ignore else - node 0.x polyfill */ if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { gracefulQueue = Symbol.for('graceful-fs.queue') // This is used in testing by future versions previousSymbol = Symbol.for('graceful-fs.previous') } else { gracefulQueue = '___graceful-fs.queue' previousSymbol = '___graceful-fs.previous' } function noop () {} function publishQueue(context, queue) { Object.defineProperty(context, gracefulQueue, { get: function() { return queue } }) } var debug = noop if (util.debuglog) debug = util.debuglog('gfs4') else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) debug = function() { var m = util.format.apply(util, arguments) m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') console.error(m) } // Once time initialization if (!fs[gracefulQueue]) { // This queue can be shared by multiple loaded instances var queue = global[gracefulQueue] || [] publishQueue(fs, queue) // Patch fs.close/closeSync to shared queue version, because we need // to retry() whenever a close happens *anywhere* in the program. // This is essential when multiple graceful-fs instances are // in play at the same time. fs.close = (function (fs$close) { function close (fd, cb) { return fs$close.call(fs, fd, function (err) { // This function uses the graceful-fs shared queue if (!err) { retry() } if (typeof cb === 'function') cb.apply(this, arguments) }) } Object.defineProperty(close, previousSymbol, { value: fs$close }) return close })(fs.close) fs.closeSync = (function (fs$closeSync) { function closeSync (fd) { // This function uses the graceful-fs shared queue fs$closeSync.apply(fs, arguments) retry() } Object.defineProperty(closeSync, previousSymbol, { value: fs$closeSync }) return closeSync })(fs.closeSync) if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { process.on('exit', function() { debug(fs[gracefulQueue]) __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0) }) } } if (!global[gracefulQueue]) { publishQueue(global, fs[gracefulQueue]); } module.exports = patch(clone(fs)) if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { module.exports = patch(fs) fs.__patched = true; } function patch (fs) { // Everything that references the open() function needs to be in here polyfills(fs) fs.gracefulify = patch fs.createReadStream = createReadStream fs.createWriteStream = createWriteStream var fs$readFile = fs.readFile fs.readFile = readFile function readFile (path, options, cb) { if (typeof options === 'function') cb = options, options = null return go$readFile(path, options, cb) function go$readFile (path, options, cb) { return fs$readFile(path, options, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$readFile, [path, options, cb]]) else { if (typeof cb === 'function') cb.apply(this, arguments) retry() } }) } } var fs$writeFile = fs.writeFile fs.writeFile = writeFile function writeFile (path, data, options, cb) { if (typeof options === 'function') cb = options, options = null return go$writeFile(path, data, options, cb) function go$writeFile (path, data, options, cb) { return fs$writeFile(path, data, options, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$writeFile, [path, data, options, cb]]) else { if (typeof cb === 'function') cb.apply(this, arguments) retry() } }) } } var fs$appendFile = fs.appendFile if (fs$appendFile) fs.appendFile = appendFile function appendFile (path, data, options, cb) { if (typeof options === 'function') cb = options, options = null return go$appendFile(path, data, options, cb) function go$appendFile (path, data, options, cb) { return fs$appendFile(path, data, options, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$appendFile, [path, data, options, cb]]) else { if (typeof cb === 'function') cb.apply(this, arguments) retry() } }) } } var fs$copyFile = fs.copyFile if (fs$copyFile) fs.copyFile = copyFile function copyFile (src, dest, flags, cb) { if (typeof flags === 'function') { cb = flags flags = 0 } return fs$copyFile(src, dest, flags, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([fs$copyFile, [src, dest, flags, cb]]) else { if (typeof cb === 'function') cb.apply(this, arguments) retry() } }) } var fs$readdir = fs.readdir fs.readdir = readdir function readdir (path, options, cb) { var args = [path] if (typeof options !== 'function') { args.push(options) } else { cb = options } args.push(go$readdir$cb) return go$readdir(args) function go$readdir$cb (err, files) { if (files && files.sort) files.sort() if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$readdir, [args]]) else { if (typeof cb === 'function') cb.apply(this, arguments) retry() } } } function go$readdir (args) { return fs$readdir.apply(fs, args) } if (process.version.substr(0, 4) === 'v0.8') { var legStreams = legacy(fs) ReadStream = legStreams.ReadStream WriteStream = legStreams.WriteStream } var fs$ReadStream = fs.ReadStream if (fs$ReadStream) { ReadStream.prototype = Object.create(fs$ReadStream.prototype) ReadStream.prototype.open = ReadStream$open } var fs$WriteStream = fs.WriteStream if (fs$WriteStream) { WriteStream.prototype = Object.create(fs$WriteStream.prototype) WriteStream.prototype.open = WriteStream$open } Object.defineProperty(fs, 'ReadStream', { get: function () { return ReadStream }, set: function (val) { ReadStream = val }, enumerable: true, configurable: true }) Object.defineProperty(fs, 'WriteStream', { get: function () { return WriteStream }, set: function (val) { WriteStream = val }, enumerable: true, configurable: true }) // legacy names var FileReadStream = ReadStream Object.defineProperty(fs, 'FileReadStream', { get: function () { return FileReadStream }, set: function (val) { FileReadStream = val }, enumerable: true, configurable: true }) var FileWriteStream = WriteStream Object.defineProperty(fs, 'FileWriteStream', { get: function () { return FileWriteStream }, set: function (val) { FileWriteStream = val }, enumerable: true, configurable: true }) function ReadStream (path, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this else return ReadStream.apply(Object.create(ReadStream.prototype), arguments) } function ReadStream$open () { var that = this open(that.path, that.flags, that.mode, function (err, fd) { if (err) { if (that.autoClose) that.destroy() that.emit('error', err) } else { that.fd = fd that.emit('open', fd) that.read() } }) } function WriteStream (path, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this else return WriteStream.apply(Object.create(WriteStream.prototype), arguments) } function WriteStream$open () { var that = this open(that.path, that.flags, that.mode, function (err, fd) { if (err) { that.destroy() that.emit('error', err) } else { that.fd = fd that.emit('open', fd) } }) } function createReadStream (path, options) { return new fs.ReadStream(path, options) } function createWriteStream (path, options) { return new fs.WriteStream(path, options) } var fs$open = fs.open fs.open = open function open (path, flags, mode, cb) { if (typeof mode === 'function') cb = mode, mode = null return go$open(path, flags, mode, cb) function go$open (path, flags, mode, cb) { return fs$open(path, flags, mode, function (err, fd) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$open, [path, flags, mode, cb]]) else { if (typeof cb === 'function') cb.apply(this, arguments) retry() } }) } } return fs } function enqueue (elem) { debug('ENQUEUE', elem[0].name, elem[1]) fs[gracefulQueue].push(elem) } function retry () { var elem = fs[gracefulQueue].shift() if (elem) { debug('RETRY', elem[0].name, elem[1]) elem[0].apply(null, elem[1]) } } /***/ }), /***/ 5162: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Stream = __nccwpck_require__(2413).Stream module.exports = legacy function legacy (fs) { return { ReadStream: ReadStream, WriteStream: WriteStream } function ReadStream (path, options) { if (!(this instanceof ReadStream)) return new ReadStream(path, options); Stream.call(this); var self = this; this.path = path; this.fd = null; this.readable = true; this.paused = false; this.flags = 'r'; this.mode = 438; /*=0666*/ this.bufferSize = 64 * 1024; options = options || {}; // Mixin options into this var keys = Object.keys(options); for (var index = 0, length = keys.length; index < length; index++) { var key = keys[index]; this[key] = options[key]; } if (this.encoding) this.setEncoding(this.encoding); if (this.start !== undefined) { if ('number' !== typeof this.start) { throw TypeError('start must be a Number'); } if (this.end === undefined) { this.end = Infinity; } else if ('number' !== typeof this.end) { throw TypeError('end must be a Number'); } if (this.start > this.end) { throw new Error('start must be <= end'); } this.pos = this.start; } if (this.fd !== null) { process.nextTick(function() { self._read(); }); return; } fs.open(this.path, this.flags, this.mode, function (err, fd) { if (err) { self.emit('error', err); self.readable = false; return; } self.fd = fd; self.emit('open', fd); self._read(); }) } function WriteStream (path, options) { if (!(this instanceof WriteStream)) return new WriteStream(path, options); Stream.call(this); this.path = path; this.fd = null; this.writable = true; this.flags = 'w'; this.encoding = 'binary'; this.mode = 438; /*=0666*/ this.bytesWritten = 0; options = options || {}; // Mixin options into this var keys = Object.keys(options); for (var index = 0, length = keys.length; index < length; index++) { var key = keys[index]; this[key] = options[key]; } if (this.start !== undefined) { if ('number' !== typeof this.start) { throw TypeError('start must be a Number'); } if (this.start < 0) { throw new Error('start must be >= zero'); } this.pos = this.start; } this.busy = false; this._queue = []; if (this.fd === null) { this._open = fs.open; this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); this.flush(); } } } /***/ }), /***/ 263: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var constants = __nccwpck_require__(7619) var origCwd = process.cwd var cwd = null var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform process.cwd = function() { if (!cwd) cwd = origCwd.call(process) return cwd } try { process.cwd() } catch (er) {} // This check is needed until node.js 12 is required if (typeof process.chdir === 'function') { var chdir = process.chdir process.chdir = function (d) { cwd = null chdir.call(process, d) } if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) } module.exports = patch function patch (fs) { // (re-)implement some things that are known busted or missing. // lchmod, broken prior to 0.6.2 // back-port the fix here. if (constants.hasOwnProperty('O_SYMLINK') && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { patchLchmod(fs) } // lutimes implementation, or no-op if (!fs.lutimes) { patchLutimes(fs) } // https://github.com/isaacs/node-graceful-fs/issues/4 // Chown should not fail on einval or eperm if non-root. // It should not fail on enosys ever, as this just indicates // that a fs doesn't support the intended operation. fs.chown = chownFix(fs.chown) fs.fchown = chownFix(fs.fchown) fs.lchown = chownFix(fs.lchown) fs.chmod = chmodFix(fs.chmod) fs.fchmod = chmodFix(fs.fchmod) fs.lchmod = chmodFix(fs.lchmod) fs.chownSync = chownFixSync(fs.chownSync) fs.fchownSync = chownFixSync(fs.fchownSync) fs.lchownSync = chownFixSync(fs.lchownSync) fs.chmodSync = chmodFixSync(fs.chmodSync) fs.fchmodSync = chmodFixSync(fs.fchmodSync) fs.lchmodSync = chmodFixSync(fs.lchmodSync) fs.stat = statFix(fs.stat) fs.fstat = statFix(fs.fstat) fs.lstat = statFix(fs.lstat) fs.statSync = statFixSync(fs.statSync) fs.fstatSync = statFixSync(fs.fstatSync) fs.lstatSync = statFixSync(fs.lstatSync) // if lchmod/lchown do not exist, then make them no-ops if (!fs.lchmod) { fs.lchmod = function (path, mode, cb) { if (cb) process.nextTick(cb) } fs.lchmodSync = function () {} } if (!fs.lchown) { fs.lchown = function (path, uid, gid, cb) { if (cb) process.nextTick(cb) } fs.lchownSync = function () {} } // on Windows, A/V software can lock the directory, causing this // to fail with an EACCES or EPERM if the directory contains newly // created files. Try again on failure, for up to 60 seconds. // Set the timeout this long because some Windows Anti-Virus, such as Parity // bit9, may lock files for up to a minute, causing npm package install // failures. Also, take care to yield the scheduler. Windows scheduling gives // CPU to a busy looping process, which can cause the program causing the lock // contention to be starved of CPU by node, so the contention doesn't resolve. if (platform === "win32") { fs.rename = (function (fs$rename) { return function (from, to, cb) { var start = Date.now() var backoff = 0; fs$rename(from, to, function CB (er) { if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < 60000) { setTimeout(function() { fs.stat(to, function (stater, st) { if (stater && stater.code === "ENOENT") fs$rename(from, to, CB); else cb(er) }) }, backoff) if (backoff < 100) backoff += 10; return; } if (cb) cb(er) }) }})(fs.rename) } // if read() returns EAGAIN, then just try it again. fs.read = (function (fs$read) { function read (fd, buffer, offset, length, position, callback_) { var callback if (callback_ && typeof callback_ === 'function') { var eagCounter = 0 callback = function (er, _, __) { if (er && er.code === 'EAGAIN' && eagCounter < 10) { eagCounter ++ return fs$read.call(fs, fd, buffer, offset, length, position, callback) } callback_.apply(this, arguments) } } return fs$read.call(fs, fd, buffer, offset, length, position, callback) } // This ensures `util.promisify` works as it does for native `fs.read`. if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) return read })(fs.read) fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { var eagCounter = 0 while (true) { try { return fs$readSync.call(fs, fd, buffer, offset, length, position) } catch (er) { if (er.code === 'EAGAIN' && eagCounter < 10) { eagCounter ++ continue } throw er } } }})(fs.readSync) function patchLchmod (fs) { fs.lchmod = function (path, mode, callback) { fs.open( path , constants.O_WRONLY | constants.O_SYMLINK , mode , function (err, fd) { if (err) { if (callback) callback(err) return } // prefer to return the chmod error, if one occurs, // but still try to close, and report closing errors if they occur. fs.fchmod(fd, mode, function (err) { fs.close(fd, function(err2) { if (callback) callback(err || err2) }) }) }) } fs.lchmodSync = function (path, mode) { var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) // prefer to return the chmod error, if one occurs, // but still try to close, and report closing errors if they occur. var threw = true var ret try { ret = fs.fchmodSync(fd, mode) threw = false } finally { if (threw) { try { fs.closeSync(fd) } catch (er) {} } else { fs.closeSync(fd) } } return ret } } function patchLutimes (fs) { if (constants.hasOwnProperty("O_SYMLINK")) { fs.lutimes = function (path, at, mt, cb) { fs.open(path, constants.O_SYMLINK, function (er, fd) { if (er) { if (cb) cb(er) return } fs.futimes(fd, at, mt, function (er) { fs.close(fd, function (er2) { if (cb) cb(er || er2) }) }) }) } fs.lutimesSync = function (path, at, mt) { var fd = fs.openSync(path, constants.O_SYMLINK) var ret var threw = true try { ret = fs.futimesSync(fd, at, mt) threw = false } finally { if (threw) { try { fs.closeSync(fd) } catch (er) {} } else { fs.closeSync(fd) } } return ret } } else { fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } fs.lutimesSync = function () {} } } function chmodFix (orig) { if (!orig) return orig return function (target, mode, cb) { return orig.call(fs, target, mode, function (er) { if (chownErOk(er)) er = null if (cb) cb.apply(this, arguments) }) } } function chmodFixSync (orig) { if (!orig) return orig return function (target, mode) { try { return orig.call(fs, target, mode) } catch (er) { if (!chownErOk(er)) throw er } } } function chownFix (orig) { if (!orig) return orig return function (target, uid, gid, cb) { return orig.call(fs, target, uid, gid, function (er) { if (chownErOk(er)) er = null if (cb) cb.apply(this, arguments) }) } } function chownFixSync (orig) { if (!orig) return orig return function (target, uid, gid) { try { return orig.call(fs, target, uid, gid) } catch (er) { if (!chownErOk(er)) throw er } } } function statFix (orig) { if (!orig) return orig // Older versions of Node erroneously returned signed integers for // uid + gid. return function (target, options, cb) { if (typeof options === 'function') { cb = options options = null } function callback (er, stats) { if (stats) { if (stats.uid < 0) stats.uid += 0x100000000 if (stats.gid < 0) stats.gid += 0x100000000 } if (cb) cb.apply(this, arguments) } return options ? orig.call(fs, target, options, callback) : orig.call(fs, target, callback) } } function statFixSync (orig) { if (!orig) return orig // Older versions of Node erroneously returned signed integers for // uid + gid. return function (target, options) { var stats = options ? orig.call(fs, target, options) : orig.call(fs, target) if (stats.uid < 0) stats.uid += 0x100000000 if (stats.gid < 0) stats.gid += 0x100000000 return stats; } } // ENOSYS means that the fs doesn't support the op. Just ignore // that, because it doesn't matter. // // if there's no getuid, or if getuid() is something other // than 0, and the error is EINVAL or EPERM, then just ignore // it. // // This specific case is a silent failure in cp, install, tar, // and most other unix tools that manage permissions. // // When running as root, or if other types of errors are // encountered, then it's strict. function chownErOk (er) { if (!er) return true if (er.code === "ENOSYS") return true var nonroot = !process.getuid || process.getuid() !== 0 if (nonroot) { if (er.code === "EINVAL" || er.code === "EPERM") return true } return false } } /***/ }), /***/ 3287: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); /*! * is-plain-object * * Copyright (c) 2014-2017, Jon Schlinkert. * Released under the MIT License. */ function isObject(o) { return Object.prototype.toString.call(o) === '[object Object]'; } function isPlainObject(o) { var ctor,prot; if (isObject(o) === false) return false; // If has modified constructor ctor = o.constructor; if (ctor === undefined) return true; // If has modified prototype prot = ctor.prototype; if (isObject(prot) === false) return false; // If constructor does not have an Object-specific method if (prot.hasOwnProperty('isPrototypeOf') === false) { return false; } // Most likely a plain Object return true; } exports.isPlainObject = isPlainObject; /***/ }), /***/ 7126: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var fs = __nccwpck_require__(5747) var core if (process.platform === 'win32' || global.TESTING_WINDOWS) { core = __nccwpck_require__(2001) } else { core = __nccwpck_require__(9728) } module.exports = isexe isexe.sync = sync function isexe (path, options, cb) { if (typeof options === 'function') { cb = options options = {} } if (!cb) { if (typeof Promise !== 'function') { throw new TypeError('callback not provided') } return new Promise(function (resolve, reject) { isexe(path, options || {}, function (er, is) { if (er) { reject(er) } else { resolve(is) } }) }) } core(path, options || {}, function (er, is) { // ignore EACCES because that just means we aren't allowed to run it if (er) { if (er.code === 'EACCES' || options && options.ignoreErrors) { er = null is = false } } cb(er, is) }) } function sync (path, options) { // my kingdom for a filtered catch try { return core.sync(path, options || {}) } catch (er) { if (options && options.ignoreErrors || er.code === 'EACCES') { return false } else { throw er } } } /***/ }), /***/ 9728: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = isexe isexe.sync = sync var fs = __nccwpck_require__(5747) function isexe (path, options, cb) { fs.stat(path, function (er, stat) { cb(er, er ? false : checkStat(stat, options)) }) } function sync (path, options) { return checkStat(fs.statSync(path), options) } function checkStat (stat, options) { return stat.isFile() && checkMode(stat, options) } function checkMode (stat, options) { var mod = stat.mode var uid = stat.uid var gid = stat.gid var myUid = options.uid !== undefined ? options.uid : process.getuid && process.getuid() var myGid = options.gid !== undefined ? options.gid : process.getgid && process.getgid() var u = parseInt('100', 8) var g = parseInt('010', 8) var o = parseInt('001', 8) var ug = u | g var ret = (mod & o) || (mod & g) && gid === myGid || (mod & u) && uid === myUid || (mod & ug) && myUid === 0 return ret } /***/ }), /***/ 2001: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = isexe isexe.sync = sync var fs = __nccwpck_require__(5747) function checkPathExt (path, options) { var pathext = options.pathExt !== undefined ? options.pathExt : process.env.PATHEXT if (!pathext) { return true } pathext = pathext.split(';') if (pathext.indexOf('') !== -1) { return true } for (var i = 0; i < pathext.length; i++) { var p = pathext[i].toLowerCase() if (p && path.substr(-p.length).toLowerCase() === p) { return true } } return false } function checkStat (stat, path, options) { if (!stat.isSymbolicLink() && !stat.isFile()) { return false } return checkPathExt(path, options) } function isexe (path, options, cb) { fs.stat(path, function (er, stat) { cb(er, er ? false : checkStat(stat, path, options)) }) } function sync (path, options) { return checkStat(fs.statSync(path), path, options) } /***/ }), /***/ 1917: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var loader = __nccwpck_require__(1161); var dumper = __nccwpck_require__(8866); function renamed(from, to) { return function () { throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' + 'Use yaml.' + to + ' instead, which is now safe by default.'); }; } module.exports.Type = __nccwpck_require__(6073); module.exports.Schema = __nccwpck_require__(1082); module.exports.FAILSAFE_SCHEMA = __nccwpck_require__(8562); module.exports.JSON_SCHEMA = __nccwpck_require__(1035); module.exports.CORE_SCHEMA = __nccwpck_require__(2011); module.exports.DEFAULT_SCHEMA = __nccwpck_require__(8759); module.exports.load = loader.load; module.exports.loadAll = loader.loadAll; module.exports.dump = dumper.dump; module.exports.YAMLException = __nccwpck_require__(8179); // Re-export all types in case user wants to create custom schema module.exports.types = { binary: __nccwpck_require__(7900), float: __nccwpck_require__(2705), map: __nccwpck_require__(6150), null: __nccwpck_require__(721), pairs: __nccwpck_require__(6860), set: __nccwpck_require__(9548), timestamp: __nccwpck_require__(9212), bool: __nccwpck_require__(4993), int: __nccwpck_require__(1615), merge: __nccwpck_require__(6104), omap: __nccwpck_require__(9046), seq: __nccwpck_require__(7283), str: __nccwpck_require__(3619) }; // Removed functions from JS-YAML 3.0.x module.exports.safeLoad = renamed('safeLoad', 'load'); module.exports.safeLoadAll = renamed('safeLoadAll', 'loadAll'); module.exports.safeDump = renamed('safeDump', 'dump'); /***/ }), /***/ 6829: /***/ ((module) => { "use strict"; function isNothing(subject) { return (typeof subject === 'undefined') || (subject === null); } function isObject(subject) { return (typeof subject === 'object') && (subject !== null); } function toArray(sequence) { if (Array.isArray(sequence)) return sequence; else if (isNothing(sequence)) return []; return [ sequence ]; } function extend(target, source) { var index, length, key, sourceKeys; if (source) { sourceKeys = Object.keys(source); for (index = 0, length = sourceKeys.length; index < length; index += 1) { key = sourceKeys[index]; target[key] = source[key]; } } return target; } function repeat(string, count) { var result = '', cycle; for (cycle = 0; cycle < count; cycle += 1) { result += string; } return result; } function isNegativeZero(number) { return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number); } module.exports.isNothing = isNothing; module.exports.isObject = isObject; module.exports.toArray = toArray; module.exports.repeat = repeat; module.exports.isNegativeZero = isNegativeZero; module.exports.extend = extend; /***/ }), /***/ 8866: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*eslint-disable no-use-before-define*/ var common = __nccwpck_require__(6829); var YAMLException = __nccwpck_require__(8179); var DEFAULT_SCHEMA = __nccwpck_require__(8759); var _toString = Object.prototype.toString; var _hasOwnProperty = Object.prototype.hasOwnProperty; var CHAR_BOM = 0xFEFF; var CHAR_TAB = 0x09; /* Tab */ var CHAR_LINE_FEED = 0x0A; /* LF */ var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ var CHAR_SPACE = 0x20; /* Space */ var CHAR_EXCLAMATION = 0x21; /* ! */ var CHAR_DOUBLE_QUOTE = 0x22; /* " */ var CHAR_SHARP = 0x23; /* # */ var CHAR_PERCENT = 0x25; /* % */ var CHAR_AMPERSAND = 0x26; /* & */ var CHAR_SINGLE_QUOTE = 0x27; /* ' */ var CHAR_ASTERISK = 0x2A; /* * */ var CHAR_COMMA = 0x2C; /* , */ var CHAR_MINUS = 0x2D; /* - */ var CHAR_COLON = 0x3A; /* : */ var CHAR_EQUALS = 0x3D; /* = */ var CHAR_GREATER_THAN = 0x3E; /* > */ var CHAR_QUESTION = 0x3F; /* ? */ var CHAR_COMMERCIAL_AT = 0x40; /* @ */ var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ var CHAR_GRAVE_ACCENT = 0x60; /* ` */ var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ var CHAR_VERTICAL_LINE = 0x7C; /* | */ var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ var ESCAPE_SEQUENCES = {}; ESCAPE_SEQUENCES[0x00] = '\\0'; ESCAPE_SEQUENCES[0x07] = '\\a'; ESCAPE_SEQUENCES[0x08] = '\\b'; ESCAPE_SEQUENCES[0x09] = '\\t'; ESCAPE_SEQUENCES[0x0A] = '\\n'; ESCAPE_SEQUENCES[0x0B] = '\\v'; ESCAPE_SEQUENCES[0x0C] = '\\f'; ESCAPE_SEQUENCES[0x0D] = '\\r'; ESCAPE_SEQUENCES[0x1B] = '\\e'; ESCAPE_SEQUENCES[0x22] = '\\"'; ESCAPE_SEQUENCES[0x5C] = '\\\\'; ESCAPE_SEQUENCES[0x85] = '\\N'; ESCAPE_SEQUENCES[0xA0] = '\\_'; ESCAPE_SEQUENCES[0x2028] = '\\L'; ESCAPE_SEQUENCES[0x2029] = '\\P'; var DEPRECATED_BOOLEANS_SYNTAX = [ 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' ]; var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; function compileStyleMap(schema, map) { var result, keys, index, length, tag, style, type; if (map === null) return {}; result = {}; keys = Object.keys(map); for (index = 0, length = keys.length; index < length; index += 1) { tag = keys[index]; style = String(map[tag]); if (tag.slice(0, 2) === '!!') { tag = 'tag:yaml.org,2002:' + tag.slice(2); } type = schema.compiledTypeMap['fallback'][tag]; if (type && _hasOwnProperty.call(type.styleAliases, style)) { style = type.styleAliases[style]; } result[tag] = style; } return result; } function encodeHex(character) { var string, handle, length; string = character.toString(16).toUpperCase(); if (character <= 0xFF) { handle = 'x'; length = 2; } else if (character <= 0xFFFF) { handle = 'u'; length = 4; } else if (character <= 0xFFFFFFFF) { handle = 'U'; length = 8; } else { throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF'); } return '\\' + handle + common.repeat('0', length - string.length) + string; } var QUOTING_TYPE_SINGLE = 1, QUOTING_TYPE_DOUBLE = 2; function State(options) { this.schema = options['schema'] || DEFAULT_SCHEMA; this.indent = Math.max(1, (options['indent'] || 2)); this.noArrayIndent = options['noArrayIndent'] || false; this.skipInvalid = options['skipInvalid'] || false; this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']); this.styleMap = compileStyleMap(this.schema, options['styles'] || null); this.sortKeys = options['sortKeys'] || false; this.lineWidth = options['lineWidth'] || 80; this.noRefs = options['noRefs'] || false; this.noCompatMode = options['noCompatMode'] || false; this.condenseFlow = options['condenseFlow'] || false; this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; this.forceQuotes = options['forceQuotes'] || false; this.replacer = typeof options['replacer'] === 'function' ? options['replacer'] : null; this.implicitTypes = this.schema.compiledImplicit; this.explicitTypes = this.schema.compiledExplicit; this.tag = null; this.result = ''; this.duplicates = []; this.usedDuplicates = null; } // Indents every line in a string. Empty lines (\n only) are not indented. function indentString(string, spaces) { var ind = common.repeat(' ', spaces), position = 0, next = -1, result = '', line, length = string.length; while (position < length) { next = string.indexOf('\n', position); if (next === -1) { line = string.slice(position); position = length; } else { line = string.slice(position, next + 1); position = next + 1; } if (line.length && line !== '\n') result += ind; result += line; } return result; } function generateNextLine(state, level) { return '\n' + common.repeat(' ', state.indent * level); } function testImplicitResolving(state, str) { var index, length, type; for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { type = state.implicitTypes[index]; if (type.resolve(str)) { return true; } } return false; } // [33] s-white ::= s-space | s-tab function isWhitespace(c) { return c === CHAR_SPACE || c === CHAR_TAB; } // Returns true if the character can be printed without escaping. // From YAML 1.2: "any allowed characters known to be non-printable // should also be escaped. [However,] This isn’t mandatory" // Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029. function isPrintable(c) { return (0x00020 <= c && c <= 0x00007E) || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM) || (0x10000 <= c && c <= 0x10FFFF); } // [34] ns-char ::= nb-char - s-white // [27] nb-char ::= c-printable - b-char - c-byte-order-mark // [26] b-char ::= b-line-feed | b-carriage-return // Including s-white (for some reason, examples doesn't match specs in this aspect) // ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark function isNsCharOrWhitespace(c) { return isPrintable(c) && c !== CHAR_BOM // - b-char && c !== CHAR_CARRIAGE_RETURN && c !== CHAR_LINE_FEED; } // [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out // c = flow-in ⇒ ns-plain-safe-in // c = block-key ⇒ ns-plain-safe-out // c = flow-key ⇒ ns-plain-safe-in // [128] ns-plain-safe-out ::= ns-char // [129] ns-plain-safe-in ::= ns-char - c-flow-indicator // [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” ) // | ( /* An ns-char preceding */ “#” ) // | ( “:” /* Followed by an ns-plain-safe(c) */ ) function isPlainSafe(c, prev, inblock) { var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); return ( // ns-plain-safe inblock ? // c = flow-in cIsNsCharOrWhitespace : cIsNsCharOrWhitespace // - c-flow-indicator && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET ) // ns-plain-char && c !== CHAR_SHARP // false on '#' && !(prev === CHAR_COLON && !cIsNsChar) // false on ': ' || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#' || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]' } // Simplified test for values allowed as the first character in plain style. function isPlainSafeFirst(c) { // Uses a subset of ns-char - c-indicator // where ns-char = nb-char - s-white. // No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part return isPrintable(c) && c !== CHAR_BOM && !isWhitespace(c) // - s-white // - (c-indicator ::= // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” && c !== CHAR_MINUS && c !== CHAR_QUESTION && c !== CHAR_COLON && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"” && c !== CHAR_SHARP && c !== CHAR_AMPERSAND && c !== CHAR_ASTERISK && c !== CHAR_EXCLAMATION && c !== CHAR_VERTICAL_LINE && c !== CHAR_EQUALS && c !== CHAR_GREATER_THAN && c !== CHAR_SINGLE_QUOTE && c !== CHAR_DOUBLE_QUOTE // | “%” | “@” | “`”) && c !== CHAR_PERCENT && c !== CHAR_COMMERCIAL_AT && c !== CHAR_GRAVE_ACCENT; } // Simplified test for values allowed as the last character in plain style. function isPlainSafeLast(c) { // just not whitespace or colon, it will be checked to be plain character later return !isWhitespace(c) && c !== CHAR_COLON; } // Same as 'string'.codePointAt(pos), but works in older browsers. function codePointAt(string, pos) { var first = string.charCodeAt(pos), second; if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) { second = string.charCodeAt(pos + 1); if (second >= 0xDC00 && second <= 0xDFFF) { // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; } } return first; } // Determines whether block indentation indicator is required. function needIndentIndicator(string) { var leadingSpaceRe = /^\n* /; return leadingSpaceRe.test(string); } var STYLE_PLAIN = 1, STYLE_SINGLE = 2, STYLE_LITERAL = 3, STYLE_FOLDED = 4, STYLE_DOUBLE = 5; // Determines which scalar styles are possible and returns the preferred style. // lineWidth = -1 => no limit. // Pre-conditions: str.length > 0. // Post-conditions: // STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. // STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). // STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType, quotingType, forceQuotes, inblock) { var i; var char = 0; var prevChar = null; var hasLineBreak = false; var hasFoldableLine = false; // only checked if shouldTrackWidth var shouldTrackWidth = lineWidth !== -1; var previousLineBreak = -1; // count the first line correctly var plain = isPlainSafeFirst(codePointAt(string, 0)) && isPlainSafeLast(codePointAt(string, string.length - 1)); if (singleLineOnly || forceQuotes) { // Case: no block styles. // Check for disallowed characters to rule out plain and single. for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { char = codePointAt(string, i); if (!isPrintable(char)) { return STYLE_DOUBLE; } plain = plain && isPlainSafe(char, prevChar, inblock); prevChar = char; } } else { // Case: block styles permitted. for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { char = codePointAt(string, i); if (char === CHAR_LINE_FEED) { hasLineBreak = true; // Check if any line can be folded. if (shouldTrackWidth) { hasFoldableLine = hasFoldableLine || // Foldable line = too long, and not more-indented. (i - previousLineBreak - 1 > lineWidth && string[previousLineBreak + 1] !== ' '); previousLineBreak = i; } } else if (!isPrintable(char)) { return STYLE_DOUBLE; } plain = plain && isPlainSafe(char, prevChar, inblock); prevChar = char; } // in case the end is missing a \n hasFoldableLine = hasFoldableLine || (shouldTrackWidth && (i - previousLineBreak - 1 > lineWidth && string[previousLineBreak + 1] !== ' ')); } // Although every style can represent \n without escaping, prefer block styles // for multiline, since they're more readable and they don't add empty lines. // Also prefer folding a super-long line. if (!hasLineBreak && !hasFoldableLine) { // Strings interpretable as another type have to be quoted; // e.g. the string 'true' vs. the boolean true. if (plain && !forceQuotes && !testAmbiguousType(string)) { return STYLE_PLAIN; } return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; } // Edge case: block indentation indicator can only have one digit. if (indentPerLevel > 9 && needIndentIndicator(string)) { return STYLE_DOUBLE; } // At this point we know block styles are valid. // Prefer literal style unless we want to fold. if (!forceQuotes) { return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; } return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; } // Note: line breaking/folding is implemented for only the folded style. // NB. We drop the last trailing newline (if any) of a returned block scalar // since the dumper adds its own newline. This always works: // • No ending newline => unaffected; already using strip "-" chomping. // • Ending newline => removed then restored. // Importantly, this keeps the "+" chomp indicator from gaining an extra line. function writeScalar(state, string, level, iskey, inblock) { state.dump = (function () { if (string.length === 0) { return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''"; } if (!state.noCompatMode) { if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { return state.quotingType === QUOTING_TYPE_DOUBLE ? ('"' + string + '"') : ("'" + string + "'"); } } var indent = state.indent * Math.max(1, level); // no 0-indent scalars // As indentation gets deeper, let the width decrease monotonically // to the lower bound min(state.lineWidth, 40). // Note that this implies // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound. // state.lineWidth > 40 + state.indent: width decreases until the lower bound. // This behaves better than a constant minimum width which disallows narrower options, // or an indent threshold which causes the width to suddenly increase. var lineWidth = state.lineWidth === -1 ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); // Without knowing if keys are implicit/explicit, assume implicit for safety. var singleLineOnly = iskey // No block styles in flow mode. || (state.flowLevel > -1 && level >= state.flowLevel); function testAmbiguity(string) { return testImplicitResolving(state, string); } switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) { case STYLE_PLAIN: return string; case STYLE_SINGLE: return "'" + string.replace(/'/g, "''") + "'"; case STYLE_LITERAL: return '|' + blockHeader(string, state.indent) + dropEndingNewline(indentString(string, indent)); case STYLE_FOLDED: return '>' + blockHeader(string, state.indent) + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); case STYLE_DOUBLE: return '"' + escapeString(string, lineWidth) + '"'; default: throw new YAMLException('impossible error: invalid scalar style'); } }()); } // Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9. function blockHeader(string, indentPerLevel) { var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ''; // note the special case: the string '\n' counts as a "trailing" empty line. var clip = string[string.length - 1] === '\n'; var keep = clip && (string[string.length - 2] === '\n' || string === '\n'); var chomp = keep ? '+' : (clip ? '' : '-'); return indentIndicator + chomp + '\n'; } // (See the note for writeScalar.) function dropEndingNewline(string) { return string[string.length - 1] === '\n' ? string.slice(0, -1) : string; } // Note: a long line without a suitable break point will exceed the width limit. // Pre-conditions: every char in str isPrintable, str.length > 0, width > 0. function foldString(string, width) { // In folded style, $k$ consecutive newlines output as $k+1$ newlines— // unless they're before or after a more-indented line, or at the very // beginning or end, in which case $k$ maps to $k$. // Therefore, parse each chunk as newline(s) followed by a content line. var lineRe = /(\n+)([^\n]*)/g; // first line (possibly an empty line) var result = (function () { var nextLF = string.indexOf('\n'); nextLF = nextLF !== -1 ? nextLF : string.length; lineRe.lastIndex = nextLF; return foldLine(string.slice(0, nextLF), width); }()); // If we haven't reached the first content line yet, don't add an extra \n. var prevMoreIndented = string[0] === '\n' || string[0] === ' '; var moreIndented; // rest of the lines var match; while ((match = lineRe.exec(string))) { var prefix = match[1], line = match[2]; moreIndented = (line[0] === ' '); result += prefix + (!prevMoreIndented && !moreIndented && line !== '' ? '\n' : '') + foldLine(line, width); prevMoreIndented = moreIndented; } return result; } // Greedy line breaking. // Picks the longest line under the limit each time, // otherwise settles for the shortest line over the limit. // NB. More-indented lines *cannot* be folded, as that would add an extra \n. function foldLine(line, width) { if (line === '' || line[0] === ' ') return line; // Since a more-indented line adds a \n, breaks can't be followed by a space. var breakRe = / [^ ]/g; // note: the match index will always be <= length-2. var match; // start is an inclusive index. end, curr, and next are exclusive. var start = 0, end, curr = 0, next = 0; var result = ''; // Invariants: 0 <= start <= length-1. // 0 <= curr <= next <= max(0, length-2). curr - start <= width. // Inside the loop: // A match implies length >= 2, so curr and next are <= length-2. while ((match = breakRe.exec(line))) { next = match.index; // maintain invariant: curr - start <= width if (next - start > width) { end = (curr > start) ? curr : next; // derive end <= length-2 result += '\n' + line.slice(start, end); // skip the space that was output as \n start = end + 1; // derive start <= length-1 } curr = next; } // By the invariants, start <= length-1, so there is something left over. // It is either the whole string or a part starting from non-whitespace. result += '\n'; // Insert a break if the remainder is too long and there is a break available. if (line.length - start > width && curr > start) { result += line.slice(start, curr) + '\n' + line.slice(curr + 1); } else { result += line.slice(start); } return result.slice(1); // drop extra \n joiner } // Escapes a double-quoted string. function escapeString(string) { var result = ''; var char = 0; var escapeSeq; for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { char = codePointAt(string, i); escapeSeq = ESCAPE_SEQUENCES[char]; if (!escapeSeq && isPrintable(char)) { result += string[i]; if (char >= 0x10000) result += string[i + 1]; } else { result += escapeSeq || encodeHex(char); } } return result; } function writeFlowSequence(state, level, object) { var _result = '', _tag = state.tag, index, length, value; for (index = 0, length = object.length; index < length; index += 1) { value = object[index]; if (state.replacer) { value = state.replacer.call(object, String(index), value); } // Write only valid elements, put null instead of invalid elements. if (writeNode(state, level, value, false, false) || (typeof value === 'undefined' && writeNode(state, level, null, false, false))) { if (_result !== '') _result += ',' + (!state.condenseFlow ? ' ' : ''); _result += state.dump; } } state.tag = _tag; state.dump = '[' + _result + ']'; } function writeBlockSequence(state, level, object, compact) { var _result = '', _tag = state.tag, index, length, value; for (index = 0, length = object.length; index < length; index += 1) { value = object[index]; if (state.replacer) { value = state.replacer.call(object, String(index), value); } // Write only valid elements, put null instead of invalid elements. if (writeNode(state, level + 1, value, true, true, false, true) || (typeof value === 'undefined' && writeNode(state, level + 1, null, true, true, false, true))) { if (!compact || _result !== '') { _result += generateNextLine(state, level); } if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { _result += '-'; } else { _result += '- '; } _result += state.dump; } } state.tag = _tag; state.dump = _result || '[]'; // Empty sequence if no valid values. } function writeFlowMapping(state, level, object) { var _result = '', _tag = state.tag, objectKeyList = Object.keys(object), index, length, objectKey, objectValue, pairBuffer; for (index = 0, length = objectKeyList.length; index < length; index += 1) { pairBuffer = ''; if (_result !== '') pairBuffer += ', '; if (state.condenseFlow) pairBuffer += '"'; objectKey = objectKeyList[index]; objectValue = object[objectKey]; if (state.replacer) { objectValue = state.replacer.call(object, objectKey, objectValue); } if (!writeNode(state, level, objectKey, false, false)) { continue; // Skip this pair because of invalid key; } if (state.dump.length > 1024) pairBuffer += '? '; pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' '); if (!writeNode(state, level, objectValue, false, false)) { continue; // Skip this pair because of invalid value. } pairBuffer += state.dump; // Both key and value are valid. _result += pairBuffer; } state.tag = _tag; state.dump = '{' + _result + '}'; } function writeBlockMapping(state, level, object, compact) { var _result = '', _tag = state.tag, objectKeyList = Object.keys(object), index, length, objectKey, objectValue, explicitPair, pairBuffer; // Allow sorting keys so that the output file is deterministic if (state.sortKeys === true) { // Default sorting objectKeyList.sort(); } else if (typeof state.sortKeys === 'function') { // Custom sort function objectKeyList.sort(state.sortKeys); } else if (state.sortKeys) { // Something is wrong throw new YAMLException('sortKeys must be a boolean or a function'); } for (index = 0, length = objectKeyList.length; index < length; index += 1) { pairBuffer = ''; if (!compact || _result !== '') { pairBuffer += generateNextLine(state, level); } objectKey = objectKeyList[index]; objectValue = object[objectKey]; if (state.replacer) { objectValue = state.replacer.call(object, objectKey, objectValue); } if (!writeNode(state, level + 1, objectKey, true, true, true)) { continue; // Skip this pair because of invalid key. } explicitPair = (state.tag !== null && state.tag !== '?') || (state.dump && state.dump.length > 1024); if (explicitPair) { if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { pairBuffer += '?'; } else { pairBuffer += '? '; } } pairBuffer += state.dump; if (explicitPair) { pairBuffer += generateNextLine(state, level); } if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { continue; // Skip this pair because of invalid value. } if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { pairBuffer += ':'; } else { pairBuffer += ': '; } pairBuffer += state.dump; // Both key and value are valid. _result += pairBuffer; } state.tag = _tag; state.dump = _result || '{}'; // Empty mapping if no valid pairs. } function detectType(state, object, explicit) { var _result, typeList, index, length, type, style; typeList = explicit ? state.explicitTypes : state.implicitTypes; for (index = 0, length = typeList.length; index < length; index += 1) { type = typeList[index]; if ((type.instanceOf || type.predicate) && (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && (!type.predicate || type.predicate(object))) { if (explicit) { if (type.multi && type.representName) { state.tag = type.representName(object); } else { state.tag = type.tag; } } else { state.tag = '?'; } if (type.represent) { style = state.styleMap[type.tag] || type.defaultStyle; if (_toString.call(type.represent) === '[object Function]') { _result = type.represent(object, style); } else if (_hasOwnProperty.call(type.represent, style)) { _result = type.represent[style](object, style); } else { throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style'); } state.dump = _result; } return true; } } return false; } // Serializes `object` and writes it to global `result`. // Returns true on success, or false on invalid object. // function writeNode(state, level, object, block, compact, iskey, isblockseq) { state.tag = null; state.dump = object; if (!detectType(state, object, false)) { detectType(state, object, true); } var type = _toString.call(state.dump); var inblock = block; var tagStr; if (block) { block = (state.flowLevel < 0 || state.flowLevel > level); } var objectOrArray = type === '[object Object]' || type === '[object Array]', duplicateIndex, duplicate; if (objectOrArray) { duplicateIndex = state.duplicates.indexOf(object); duplicate = duplicateIndex !== -1; } if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) { compact = false; } if (duplicate && state.usedDuplicates[duplicateIndex]) { state.dump = '*ref_' + duplicateIndex; } else { if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { state.usedDuplicates[duplicateIndex] = true; } if (type === '[object Object]') { if (block && (Object.keys(state.dump).length !== 0)) { writeBlockMapping(state, level, state.dump, compact); if (duplicate) { state.dump = '&ref_' + duplicateIndex + state.dump; } } else { writeFlowMapping(state, level, state.dump); if (duplicate) { state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; } } } else if (type === '[object Array]') { if (block && (state.dump.length !== 0)) { if (state.noArrayIndent && !isblockseq && level > 0) { writeBlockSequence(state, level - 1, state.dump, compact); } else { writeBlockSequence(state, level, state.dump, compact); } if (duplicate) { state.dump = '&ref_' + duplicateIndex + state.dump; } } else { writeFlowSequence(state, level, state.dump); if (duplicate) { state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; } } } else if (type === '[object String]') { if (state.tag !== '?') { writeScalar(state, state.dump, level, iskey, inblock); } } else if (type === '[object Undefined]') { return false; } else { if (state.skipInvalid) return false; throw new YAMLException('unacceptable kind of an object to dump ' + type); } if (state.tag !== null && state.tag !== '?') { // Need to encode all characters except those allowed by the spec: // // [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */ // [36] ns-hex-digit ::= ns-dec-digit // | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */ // [37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */ // [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-” // [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#” // | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,” // | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]” // // Also need to encode '!' because it has special meaning (end of tag prefix). // tagStr = encodeURI( state.tag[0] === '!' ? state.tag.slice(1) : state.tag ).replace(/!/g, '%21'); if (state.tag[0] === '!') { tagStr = '!' + tagStr; } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') { tagStr = '!!' + tagStr.slice(18); } else { tagStr = '!<' + tagStr + '>'; } state.dump = tagStr + ' ' + state.dump; } } return true; } function getDuplicateReferences(object, state) { var objects = [], duplicatesIndexes = [], index, length; inspectNode(object, objects, duplicatesIndexes); for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { state.duplicates.push(objects[duplicatesIndexes[index]]); } state.usedDuplicates = new Array(length); } function inspectNode(object, objects, duplicatesIndexes) { var objectKeyList, index, length; if (object !== null && typeof object === 'object') { index = objects.indexOf(object); if (index !== -1) { if (duplicatesIndexes.indexOf(index) === -1) { duplicatesIndexes.push(index); } } else { objects.push(object); if (Array.isArray(object)) { for (index = 0, length = object.length; index < length; index += 1) { inspectNode(object[index], objects, duplicatesIndexes); } } else { objectKeyList = Object.keys(object); for (index = 0, length = objectKeyList.length; index < length; index += 1) { inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); } } } } } function dump(input, options) { options = options || {}; var state = new State(options); if (!state.noRefs) getDuplicateReferences(input, state); var value = input; if (state.replacer) { value = state.replacer.call({ '': value }, '', value); } if (writeNode(state, 0, value, true, true)) return state.dump + '\n'; return ''; } module.exports.dump = dump; /***/ }), /***/ 8179: /***/ ((module) => { "use strict"; // YAML error class. http://stackoverflow.com/questions/8458984 // function formatError(exception, compact) { var where = '', message = exception.reason || '(unknown reason)'; if (!exception.mark) return message; if (exception.mark.name) { where += 'in "' + exception.mark.name + '" '; } where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')'; if (!compact && exception.mark.snippet) { where += '\n\n' + exception.mark.snippet; } return message + ' ' + where; } function YAMLException(reason, mark) { // Super constructor Error.call(this); this.name = 'YAMLException'; this.reason = reason; this.mark = mark; this.message = formatError(this, false); // Include stack trace in error object if (Error.captureStackTrace) { // Chrome and NodeJS Error.captureStackTrace(this, this.constructor); } else { // FF, IE 10+ and Safari 6+. Fallback for others this.stack = (new Error()).stack || ''; } } // Inherit from Error YAMLException.prototype = Object.create(Error.prototype); YAMLException.prototype.constructor = YAMLException; YAMLException.prototype.toString = function toString(compact) { return this.name + ': ' + formatError(this, compact); }; module.exports = YAMLException; /***/ }), /***/ 1161: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*eslint-disable max-len,no-use-before-define*/ var common = __nccwpck_require__(6829); var YAMLException = __nccwpck_require__(8179); var makeSnippet = __nccwpck_require__(6975); var DEFAULT_SCHEMA = __nccwpck_require__(8759); var _hasOwnProperty = Object.prototype.hasOwnProperty; var CONTEXT_FLOW_IN = 1; var CONTEXT_FLOW_OUT = 2; var CONTEXT_BLOCK_IN = 3; var CONTEXT_BLOCK_OUT = 4; var CHOMPING_CLIP = 1; var CHOMPING_STRIP = 2; var CHOMPING_KEEP = 3; var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; function _class(obj) { return Object.prototype.toString.call(obj); } function is_EOL(c) { return (c === 0x0A/* LF */) || (c === 0x0D/* CR */); } function is_WHITE_SPACE(c) { return (c === 0x09/* Tab */) || (c === 0x20/* Space */); } function is_WS_OR_EOL(c) { return (c === 0x09/* Tab */) || (c === 0x20/* Space */) || (c === 0x0A/* LF */) || (c === 0x0D/* CR */); } function is_FLOW_INDICATOR(c) { return c === 0x2C/* , */ || c === 0x5B/* [ */ || c === 0x5D/* ] */ || c === 0x7B/* { */ || c === 0x7D/* } */; } function fromHexCode(c) { var lc; if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { return c - 0x30; } /*eslint-disable no-bitwise*/ lc = c | 0x20; if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) { return lc - 0x61 + 10; } return -1; } function escapedHexLen(c) { if (c === 0x78/* x */) { return 2; } if (c === 0x75/* u */) { return 4; } if (c === 0x55/* U */) { return 8; } return 0; } function fromDecimalCode(c) { if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { return c - 0x30; } return -1; } function simpleEscapeSequence(c) { /* eslint-disable indent */ return (c === 0x30/* 0 */) ? '\x00' : (c === 0x61/* a */) ? '\x07' : (c === 0x62/* b */) ? '\x08' : (c === 0x74/* t */) ? '\x09' : (c === 0x09/* Tab */) ? '\x09' : (c === 0x6E/* n */) ? '\x0A' : (c === 0x76/* v */) ? '\x0B' : (c === 0x66/* f */) ? '\x0C' : (c === 0x72/* r */) ? '\x0D' : (c === 0x65/* e */) ? '\x1B' : (c === 0x20/* Space */) ? ' ' : (c === 0x22/* " */) ? '\x22' : (c === 0x2F/* / */) ? '/' : (c === 0x5C/* \ */) ? '\x5C' : (c === 0x4E/* N */) ? '\x85' : (c === 0x5F/* _ */) ? '\xA0' : (c === 0x4C/* L */) ? '\u2028' : (c === 0x50/* P */) ? '\u2029' : ''; } function charFromCodepoint(c) { if (c <= 0xFFFF) { return String.fromCharCode(c); } // Encode UTF-16 surrogate pair // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF return String.fromCharCode( ((c - 0x010000) >> 10) + 0xD800, ((c - 0x010000) & 0x03FF) + 0xDC00 ); } var simpleEscapeCheck = new Array(256); // integer, for fast access var simpleEscapeMap = new Array(256); for (var i = 0; i < 256; i++) { simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0; simpleEscapeMap[i] = simpleEscapeSequence(i); } function State(input, options) { this.input = input; this.filename = options['filename'] || null; this.schema = options['schema'] || DEFAULT_SCHEMA; this.onWarning = options['onWarning'] || null; // (Hidden) Remove? makes the loader to expect YAML 1.1 documents // if such documents have no explicit %YAML directive this.legacy = options['legacy'] || false; this.json = options['json'] || false; this.listener = options['listener'] || null; this.implicitTypes = this.schema.compiledImplicit; this.typeMap = this.schema.compiledTypeMap; this.length = input.length; this.position = 0; this.line = 0; this.lineStart = 0; this.lineIndent = 0; // position of first leading tab in the current line, // used to make sure there are no tabs in the indentation this.firstTabInLine = -1; this.documents = []; /* this.version; this.checkLineBreaks; this.tagMap; this.anchorMap; this.tag; this.anchor; this.kind; this.result;*/ } function generateError(state, message) { var mark = { name: state.filename, buffer: state.input.slice(0, -1), // omit trailing \0 position: state.position, line: state.line, column: state.position - state.lineStart }; mark.snippet = makeSnippet(mark); return new YAMLException(message, mark); } function throwError(state, message) { throw generateError(state, message); } function throwWarning(state, message) { if (state.onWarning) { state.onWarning.call(null, generateError(state, message)); } } var directiveHandlers = { YAML: function handleYamlDirective(state, name, args) { var match, major, minor; if (state.version !== null) { throwError(state, 'duplication of %YAML directive'); } if (args.length !== 1) { throwError(state, 'YAML directive accepts exactly one argument'); } match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); if (match === null) { throwError(state, 'ill-formed argument of the YAML directive'); } major = parseInt(match[1], 10); minor = parseInt(match[2], 10); if (major !== 1) { throwError(state, 'unacceptable YAML version of the document'); } state.version = args[0]; state.checkLineBreaks = (minor < 2); if (minor !== 1 && minor !== 2) { throwWarning(state, 'unsupported YAML version of the document'); } }, TAG: function handleTagDirective(state, name, args) { var handle, prefix; if (args.length !== 2) { throwError(state, 'TAG directive accepts exactly two arguments'); } handle = args[0]; prefix = args[1]; if (!PATTERN_TAG_HANDLE.test(handle)) { throwError(state, 'ill-formed tag handle (first argument) of the TAG directive'); } if (_hasOwnProperty.call(state.tagMap, handle)) { throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); } if (!PATTERN_TAG_URI.test(prefix)) { throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive'); } try { prefix = decodeURIComponent(prefix); } catch (err) { throwError(state, 'tag prefix is malformed: ' + prefix); } state.tagMap[handle] = prefix; } }; function captureSegment(state, start, end, checkJson) { var _position, _length, _character, _result; if (start < end) { _result = state.input.slice(start, end); if (checkJson) { for (_position = 0, _length = _result.length; _position < _length; _position += 1) { _character = _result.charCodeAt(_position); if (!(_character === 0x09 || (0x20 <= _character && _character <= 0x10FFFF))) { throwError(state, 'expected valid JSON character'); } } } else if (PATTERN_NON_PRINTABLE.test(_result)) { throwError(state, 'the stream contains non-printable characters'); } state.result += _result; } } function mergeMappings(state, destination, source, overridableKeys) { var sourceKeys, key, index, quantity; if (!common.isObject(source)) { throwError(state, 'cannot merge mappings; the provided source object is unacceptable'); } sourceKeys = Object.keys(source); for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { key = sourceKeys[index]; if (!_hasOwnProperty.call(destination, key)) { destination[key] = source[key]; overridableKeys[key] = true; } } } function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startLineStart, startPos) { var index, quantity; // The output is a plain object here, so keys can only be strings. // We need to convert keyNode to a string, but doing so can hang the process // (deeply nested arrays that explode exponentially using aliases). if (Array.isArray(keyNode)) { keyNode = Array.prototype.slice.call(keyNode); for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { if (Array.isArray(keyNode[index])) { throwError(state, 'nested arrays are not supported inside keys'); } if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') { keyNode[index] = '[object Object]'; } } } // Avoid code execution in load() via toString property // (still use its own toString for arrays, timestamps, // and whatever user schema extensions happen to have @@toStringTag) if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') { keyNode = '[object Object]'; } keyNode = String(keyNode); if (_result === null) { _result = {}; } if (keyTag === 'tag:yaml.org,2002:merge') { if (Array.isArray(valueNode)) { for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { mergeMappings(state, _result, valueNode[index], overridableKeys); } } else { mergeMappings(state, _result, valueNode, overridableKeys); } } else { if (!state.json && !_hasOwnProperty.call(overridableKeys, keyNode) && _hasOwnProperty.call(_result, keyNode)) { state.line = startLine || state.line; state.lineStart = startLineStart || state.lineStart; state.position = startPos || state.position; throwError(state, 'duplicated mapping key'); } // used for this specific key only because Object.defineProperty is slow if (keyNode === '__proto__') { Object.defineProperty(_result, keyNode, { configurable: true, enumerable: true, writable: true, value: valueNode }); } else { _result[keyNode] = valueNode; } delete overridableKeys[keyNode]; } return _result; } function readLineBreak(state) { var ch; ch = state.input.charCodeAt(state.position); if (ch === 0x0A/* LF */) { state.position++; } else if (ch === 0x0D/* CR */) { state.position++; if (state.input.charCodeAt(state.position) === 0x0A/* LF */) { state.position++; } } else { throwError(state, 'a line break is expected'); } state.line += 1; state.lineStart = state.position; state.firstTabInLine = -1; } function skipSeparationSpace(state, allowComments, checkIndent) { var lineBreaks = 0, ch = state.input.charCodeAt(state.position); while (ch !== 0) { while (is_WHITE_SPACE(ch)) { if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) { state.firstTabInLine = state.position; } ch = state.input.charCodeAt(++state.position); } if (allowComments && ch === 0x23/* # */) { do { ch = state.input.charCodeAt(++state.position); } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0); } if (is_EOL(ch)) { readLineBreak(state); ch = state.input.charCodeAt(state.position); lineBreaks++; state.lineIndent = 0; while (ch === 0x20/* Space */) { state.lineIndent++; ch = state.input.charCodeAt(++state.position); } } else { break; } } if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { throwWarning(state, 'deficient indentation'); } return lineBreaks; } function testDocumentSeparator(state) { var _position = state.position, ch; ch = state.input.charCodeAt(_position); // Condition state.position === state.lineStart is tested // in parent on each call, for efficiency. No needs to test here again. if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) && ch === state.input.charCodeAt(_position + 1) && ch === state.input.charCodeAt(_position + 2)) { _position += 3; ch = state.input.charCodeAt(_position); if (ch === 0 || is_WS_OR_EOL(ch)) { return true; } } return false; } function writeFoldedLines(state, count) { if (count === 1) { state.result += ' '; } else if (count > 1) { state.result += common.repeat('\n', count - 1); } } function readPlainScalar(state, nodeIndent, withinFlowCollection) { var preceding, following, captureStart, captureEnd, hasPendingContent, _line, _lineStart, _lineIndent, _kind = state.kind, _result = state.result, ch; ch = state.input.charCodeAt(state.position); if (is_WS_OR_EOL(ch) || is_FLOW_INDICATOR(ch) || ch === 0x23/* # */ || ch === 0x26/* & */ || ch === 0x2A/* * */ || ch === 0x21/* ! */ || ch === 0x7C/* | */ || ch === 0x3E/* > */ || ch === 0x27/* ' */ || ch === 0x22/* " */ || ch === 0x25/* % */ || ch === 0x40/* @ */ || ch === 0x60/* ` */) { return false; } if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) { following = state.input.charCodeAt(state.position + 1); if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) { return false; } } state.kind = 'scalar'; state.result = ''; captureStart = captureEnd = state.position; hasPendingContent = false; while (ch !== 0) { if (ch === 0x3A/* : */) { following = state.input.charCodeAt(state.position + 1); if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) { break; } } else if (ch === 0x23/* # */) { preceding = state.input.charCodeAt(state.position - 1); if (is_WS_OR_EOL(preceding)) { break; } } else if ((state.position === state.lineStart && testDocumentSeparator(state)) || withinFlowCollection && is_FLOW_INDICATOR(ch)) { break; } else if (is_EOL(ch)) { _line = state.line; _lineStart = state.lineStart; _lineIndent = state.lineIndent; skipSeparationSpace(state, false, -1); if (state.lineIndent >= nodeIndent) { hasPendingContent = true; ch = state.input.charCodeAt(state.position); continue; } else { state.position = captureEnd; state.line = _line; state.lineStart = _lineStart; state.lineIndent = _lineIndent; break; } } if (hasPendingContent) { captureSegment(state, captureStart, captureEnd, false); writeFoldedLines(state, state.line - _line); captureStart = captureEnd = state.position; hasPendingContent = false; } if (!is_WHITE_SPACE(ch)) { captureEnd = state.position + 1; } ch = state.input.charCodeAt(++state.position); } captureSegment(state, captureStart, captureEnd, false); if (state.result) { return true; } state.kind = _kind; state.result = _result; return false; } function readSingleQuotedScalar(state, nodeIndent) { var ch, captureStart, captureEnd; ch = state.input.charCodeAt(state.position); if (ch !== 0x27/* ' */) { return false; } state.kind = 'scalar'; state.result = ''; state.position++; captureStart = captureEnd = state.position; while ((ch = state.input.charCodeAt(state.position)) !== 0) { if (ch === 0x27/* ' */) { captureSegment(state, captureStart, state.position, true); ch = state.input.charCodeAt(++state.position); if (ch === 0x27/* ' */) { captureStart = state.position; state.position++; captureEnd = state.position; } else { return true; } } else if (is_EOL(ch)) { captureSegment(state, captureStart, captureEnd, true); writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); captureStart = captureEnd = state.position; } else if (state.position === state.lineStart && testDocumentSeparator(state)) { throwError(state, 'unexpected end of the document within a single quoted scalar'); } else { state.position++; captureEnd = state.position; } } throwError(state, 'unexpected end of the stream within a single quoted scalar'); } function readDoubleQuotedScalar(state, nodeIndent) { var captureStart, captureEnd, hexLength, hexResult, tmp, ch; ch = state.input.charCodeAt(state.position); if (ch !== 0x22/* " */) { return false; } state.kind = 'scalar'; state.result = ''; state.position++; captureStart = captureEnd = state.position; while ((ch = state.input.charCodeAt(state.position)) !== 0) { if (ch === 0x22/* " */) { captureSegment(state, captureStart, state.position, true); state.position++; return true; } else if (ch === 0x5C/* \ */) { captureSegment(state, captureStart, state.position, true); ch = state.input.charCodeAt(++state.position); if (is_EOL(ch)) { skipSeparationSpace(state, false, nodeIndent); // TODO: rework to inline fn with no type cast? } else if (ch < 256 && simpleEscapeCheck[ch]) { state.result += simpleEscapeMap[ch]; state.position++; } else if ((tmp = escapedHexLen(ch)) > 0) { hexLength = tmp; hexResult = 0; for (; hexLength > 0; hexLength--) { ch = state.input.charCodeAt(++state.position); if ((tmp = fromHexCode(ch)) >= 0) { hexResult = (hexResult << 4) + tmp; } else { throwError(state, 'expected hexadecimal character'); } } state.result += charFromCodepoint(hexResult); state.position++; } else { throwError(state, 'unknown escape sequence'); } captureStart = captureEnd = state.position; } else if (is_EOL(ch)) { captureSegment(state, captureStart, captureEnd, true); writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); captureStart = captureEnd = state.position; } else if (state.position === state.lineStart && testDocumentSeparator(state)) { throwError(state, 'unexpected end of the document within a double quoted scalar'); } else { state.position++; captureEnd = state.position; } } throwError(state, 'unexpected end of the stream within a double quoted scalar'); } function readFlowCollection(state, nodeIndent) { var readNext = true, _line, _lineStart, _pos, _tag = state.tag, _result, _anchor = state.anchor, following, terminator, isPair, isExplicitPair, isMapping, overridableKeys = Object.create(null), keyNode, keyTag, valueNode, ch; ch = state.input.charCodeAt(state.position); if (ch === 0x5B/* [ */) { terminator = 0x5D;/* ] */ isMapping = false; _result = []; } else if (ch === 0x7B/* { */) { terminator = 0x7D;/* } */ isMapping = true; _result = {}; } else { return false; } if (state.anchor !== null) { state.anchorMap[state.anchor] = _result; } ch = state.input.charCodeAt(++state.position); while (ch !== 0) { skipSeparationSpace(state, true, nodeIndent); ch = state.input.charCodeAt(state.position); if (ch === terminator) { state.position++; state.tag = _tag; state.anchor = _anchor; state.kind = isMapping ? 'mapping' : 'sequence'; state.result = _result; return true; } else if (!readNext) { throwError(state, 'missed comma between flow collection entries'); } else if (ch === 0x2C/* , */) { // "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4 throwError(state, "expected the node content, but found ','"); } keyTag = keyNode = valueNode = null; isPair = isExplicitPair = false; if (ch === 0x3F/* ? */) { following = state.input.charCodeAt(state.position + 1); if (is_WS_OR_EOL(following)) { isPair = isExplicitPair = true; state.position++; skipSeparationSpace(state, true, nodeIndent); } } _line = state.line; // Save the current line. _lineStart = state.lineStart; _pos = state.position; composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); keyTag = state.tag; keyNode = state.result; skipSeparationSpace(state, true, nodeIndent); ch = state.input.charCodeAt(state.position); if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) { isPair = true; ch = state.input.charCodeAt(++state.position); skipSeparationSpace(state, true, nodeIndent); composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); valueNode = state.result; } if (isMapping) { storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); } else if (isPair) { _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); } else { _result.push(keyNode); } skipSeparationSpace(state, true, nodeIndent); ch = state.input.charCodeAt(state.position); if (ch === 0x2C/* , */) { readNext = true; ch = state.input.charCodeAt(++state.position); } else { readNext = false; } } throwError(state, 'unexpected end of the stream within a flow collection'); } function readBlockScalar(state, nodeIndent) { var captureStart, folding, chomping = CHOMPING_CLIP, didReadContent = false, detectedIndent = false, textIndent = nodeIndent, emptyLines = 0, atMoreIndented = false, tmp, ch; ch = state.input.charCodeAt(state.position); if (ch === 0x7C/* | */) { folding = false; } else if (ch === 0x3E/* > */) { folding = true; } else { return false; } state.kind = 'scalar'; state.result = ''; while (ch !== 0) { ch = state.input.charCodeAt(++state.position); if (ch === 0x2B/* + */ || ch === 0x2D/* - */) { if (CHOMPING_CLIP === chomping) { chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP; } else { throwError(state, 'repeat of a chomping mode identifier'); } } else if ((tmp = fromDecimalCode(ch)) >= 0) { if (tmp === 0) { throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one'); } else if (!detectedIndent) { textIndent = nodeIndent + tmp - 1; detectedIndent = true; } else { throwError(state, 'repeat of an indentation width identifier'); } } else { break; } } if (is_WHITE_SPACE(ch)) { do { ch = state.input.charCodeAt(++state.position); } while (is_WHITE_SPACE(ch)); if (ch === 0x23/* # */) { do { ch = state.input.charCodeAt(++state.position); } while (!is_EOL(ch) && (ch !== 0)); } } while (ch !== 0) { readLineBreak(state); state.lineIndent = 0; ch = state.input.charCodeAt(state.position); while ((!detectedIndent || state.lineIndent < textIndent) && (ch === 0x20/* Space */)) { state.lineIndent++; ch = state.input.charCodeAt(++state.position); } if (!detectedIndent && state.lineIndent > textIndent) { textIndent = state.lineIndent; } if (is_EOL(ch)) { emptyLines++; continue; } // End of the scalar. if (state.lineIndent < textIndent) { // Perform the chomping. if (chomping === CHOMPING_KEEP) { state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); } else if (chomping === CHOMPING_CLIP) { if (didReadContent) { // i.e. only if the scalar is not empty. state.result += '\n'; } } // Break this `while` cycle and go to the funciton's epilogue. break; } // Folded style: use fancy rules to handle line breaks. if (folding) { // Lines starting with white space characters (more-indented lines) are not folded. if (is_WHITE_SPACE(ch)) { atMoreIndented = true; // except for the first content line (cf. Example 8.1) state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); // End of more-indented block. } else if (atMoreIndented) { atMoreIndented = false; state.result += common.repeat('\n', emptyLines + 1); // Just one line break - perceive as the same line. } else if (emptyLines === 0) { if (didReadContent) { // i.e. only if we have already read some scalar content. state.result += ' '; } // Several line breaks - perceive as different lines. } else { state.result += common.repeat('\n', emptyLines); } // Literal style: just add exact number of line breaks between content lines. } else { // Keep all line breaks except the header line break. state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); } didReadContent = true; detectedIndent = true; emptyLines = 0; captureStart = state.position; while (!is_EOL(ch) && (ch !== 0)) { ch = state.input.charCodeAt(++state.position); } captureSegment(state, captureStart, state.position, false); } return true; } function readBlockSequence(state, nodeIndent) { var _line, _tag = state.tag, _anchor = state.anchor, _result = [], following, detected = false, ch; // there is a leading tab before this token, so it can't be a block sequence/mapping; // it can still be flow sequence/mapping or a scalar if (state.firstTabInLine !== -1) return false; if (state.anchor !== null) { state.anchorMap[state.anchor] = _result; } ch = state.input.charCodeAt(state.position); while (ch !== 0) { if (state.firstTabInLine !== -1) { state.position = state.firstTabInLine; throwError(state, 'tab characters must not be used in indentation'); } if (ch !== 0x2D/* - */) { break; } following = state.input.charCodeAt(state.position + 1); if (!is_WS_OR_EOL(following)) { break; } detected = true; state.position++; if (skipSeparationSpace(state, true, -1)) { if (state.lineIndent <= nodeIndent) { _result.push(null); ch = state.input.charCodeAt(state.position); continue; } } _line = state.line; composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); _result.push(state.result); skipSeparationSpace(state, true, -1); ch = state.input.charCodeAt(state.position); if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { throwError(state, 'bad indentation of a sequence entry'); } else if (state.lineIndent < nodeIndent) { break; } } if (detected) { state.tag = _tag; state.anchor = _anchor; state.kind = 'sequence'; state.result = _result; return true; } return false; } function readBlockMapping(state, nodeIndent, flowIndent) { var following, allowCompact, _line, _keyLine, _keyLineStart, _keyPos, _tag = state.tag, _anchor = state.anchor, _result = {}, overridableKeys = Object.create(null), keyTag = null, keyNode = null, valueNode = null, atExplicitKey = false, detected = false, ch; // there is a leading tab before this token, so it can't be a block sequence/mapping; // it can still be flow sequence/mapping or a scalar if (state.firstTabInLine !== -1) return false; if (state.anchor !== null) { state.anchorMap[state.anchor] = _result; } ch = state.input.charCodeAt(state.position); while (ch !== 0) { if (!atExplicitKey && state.firstTabInLine !== -1) { state.position = state.firstTabInLine; throwError(state, 'tab characters must not be used in indentation'); } following = state.input.charCodeAt(state.position + 1); _line = state.line; // Save the current line. // // Explicit notation case. There are two separate blocks: // first for the key (denoted by "?") and second for the value (denoted by ":") // if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) { if (ch === 0x3F/* ? */) { if (atExplicitKey) { storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); keyTag = keyNode = valueNode = null; } detected = true; atExplicitKey = true; allowCompact = true; } else if (atExplicitKey) { // i.e. 0x3A/* : */ === character after the explicit key. atExplicitKey = false; allowCompact = true; } else { throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line'); } state.position += 1; ch = following; // // Implicit notation case. Flow-style node as the key first, then ":", and the value. // } else { _keyLine = state.line; _keyLineStart = state.lineStart; _keyPos = state.position; if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { // Neither implicit nor explicit notation. // Reading is done. Go to the epilogue. break; } if (state.line === _line) { ch = state.input.charCodeAt(state.position); while (is_WHITE_SPACE(ch)) { ch = state.input.charCodeAt(++state.position); } if (ch === 0x3A/* : */) { ch = state.input.charCodeAt(++state.position); if (!is_WS_OR_EOL(ch)) { throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping'); } if (atExplicitKey) { storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); keyTag = keyNode = valueNode = null; } detected = true; atExplicitKey = false; allowCompact = false; keyTag = state.tag; keyNode = state.result; } else if (detected) { throwError(state, 'can not read an implicit mapping pair; a colon is missed'); } else { state.tag = _tag; state.anchor = _anchor; return true; // Keep the result of `composeNode`. } } else if (detected) { throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key'); } else { state.tag = _tag; state.anchor = _anchor; return true; // Keep the result of `composeNode`. } } // // Common reading code for both explicit and implicit notations. // if (state.line === _line || state.lineIndent > nodeIndent) { if (atExplicitKey) { _keyLine = state.line; _keyLineStart = state.lineStart; _keyPos = state.position; } if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { if (atExplicitKey) { keyNode = state.result; } else { valueNode = state.result; } } if (!atExplicitKey) { storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); keyTag = keyNode = valueNode = null; } skipSeparationSpace(state, true, -1); ch = state.input.charCodeAt(state.position); } if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { throwError(state, 'bad indentation of a mapping entry'); } else if (state.lineIndent < nodeIndent) { break; } } // // Epilogue. // // Special case: last mapping's node contains only the key in explicit notation. if (atExplicitKey) { storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); } // Expose the resulting mapping. if (detected) { state.tag = _tag; state.anchor = _anchor; state.kind = 'mapping'; state.result = _result; } return detected; } function readTagProperty(state) { var _position, isVerbatim = false, isNamed = false, tagHandle, tagName, ch; ch = state.input.charCodeAt(state.position); if (ch !== 0x21/* ! */) return false; if (state.tag !== null) { throwError(state, 'duplication of a tag property'); } ch = state.input.charCodeAt(++state.position); if (ch === 0x3C/* < */) { isVerbatim = true; ch = state.input.charCodeAt(++state.position); } else if (ch === 0x21/* ! */) { isNamed = true; tagHandle = '!!'; ch = state.input.charCodeAt(++state.position); } else { tagHandle = '!'; } _position = state.position; if (isVerbatim) { do { ch = state.input.charCodeAt(++state.position); } while (ch !== 0 && ch !== 0x3E/* > */); if (state.position < state.length) { tagName = state.input.slice(_position, state.position); ch = state.input.charCodeAt(++state.position); } else { throwError(state, 'unexpected end of the stream within a verbatim tag'); } } else { while (ch !== 0 && !is_WS_OR_EOL(ch)) { if (ch === 0x21/* ! */) { if (!isNamed) { tagHandle = state.input.slice(_position - 1, state.position + 1); if (!PATTERN_TAG_HANDLE.test(tagHandle)) { throwError(state, 'named tag handle cannot contain such characters'); } isNamed = true; _position = state.position + 1; } else { throwError(state, 'tag suffix cannot contain exclamation marks'); } } ch = state.input.charCodeAt(++state.position); } tagName = state.input.slice(_position, state.position); if (PATTERN_FLOW_INDICATORS.test(tagName)) { throwError(state, 'tag suffix cannot contain flow indicator characters'); } } if (tagName && !PATTERN_TAG_URI.test(tagName)) { throwError(state, 'tag name cannot contain such characters: ' + tagName); } try { tagName = decodeURIComponent(tagName); } catch (err) { throwError(state, 'tag name is malformed: ' + tagName); } if (isVerbatim) { state.tag = tagName; } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) { state.tag = state.tagMap[tagHandle] + tagName; } else if (tagHandle === '!') { state.tag = '!' + tagName; } else if (tagHandle === '!!') { state.tag = 'tag:yaml.org,2002:' + tagName; } else { throwError(state, 'undeclared tag handle "' + tagHandle + '"'); } return true; } function readAnchorProperty(state) { var _position, ch; ch = state.input.charCodeAt(state.position); if (ch !== 0x26/* & */) return false; if (state.anchor !== null) { throwError(state, 'duplication of an anchor property'); } ch = state.input.charCodeAt(++state.position); _position = state.position; while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { ch = state.input.charCodeAt(++state.position); } if (state.position === _position) { throwError(state, 'name of an anchor node must contain at least one character'); } state.anchor = state.input.slice(_position, state.position); return true; } function readAlias(state) { var _position, alias, ch; ch = state.input.charCodeAt(state.position); if (ch !== 0x2A/* * */) return false; ch = state.input.charCodeAt(++state.position); _position = state.position; while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { ch = state.input.charCodeAt(++state.position); } if (state.position === _position) { throwError(state, 'name of an alias node must contain at least one character'); } alias = state.input.slice(_position, state.position); if (!_hasOwnProperty.call(state.anchorMap, alias)) { throwError(state, 'unidentified alias "' + alias + '"'); } state.result = state.anchorMap[alias]; skipSeparationSpace(state, true, -1); return true; } function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) { var allowBlockStyles, allowBlockScalars, allowBlockCollections, indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this parentIndent) { indentStatus = 1; } else if (state.lineIndent === parentIndent) { indentStatus = 0; } else if (state.lineIndent < parentIndent) { indentStatus = -1; } } } if (indentStatus === 1) { while (readTagProperty(state) || readAnchorProperty(state)) { if (skipSeparationSpace(state, true, -1)) { atNewLine = true; allowBlockCollections = allowBlockStyles; if (state.lineIndent > parentIndent) { indentStatus = 1; } else if (state.lineIndent === parentIndent) { indentStatus = 0; } else if (state.lineIndent < parentIndent) { indentStatus = -1; } } else { allowBlockCollections = false; } } } if (allowBlockCollections) { allowBlockCollections = atNewLine || allowCompact; } if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { flowIndent = parentIndent; } else { flowIndent = parentIndent + 1; } blockIndent = state.position - state.lineStart; if (indentStatus === 1) { if (allowBlockCollections && (readBlockSequence(state, blockIndent) || readBlockMapping(state, blockIndent, flowIndent)) || readFlowCollection(state, flowIndent)) { hasContent = true; } else { if ((allowBlockScalars && readBlockScalar(state, flowIndent)) || readSingleQuotedScalar(state, flowIndent) || readDoubleQuotedScalar(state, flowIndent)) { hasContent = true; } else if (readAlias(state)) { hasContent = true; if (state.tag !== null || state.anchor !== null) { throwError(state, 'alias node should not have any properties'); } } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { hasContent = true; if (state.tag === null) { state.tag = '?'; } } if (state.anchor !== null) { state.anchorMap[state.anchor] = state.result; } } } else if (indentStatus === 0) { // Special case: block sequences are allowed to have same indentation level as the parent. // http://www.yaml.org/spec/1.2/spec.html#id2799784 hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); } } if (state.tag === null) { if (state.anchor !== null) { state.anchorMap[state.anchor] = state.result; } } else if (state.tag === '?') { // Implicit resolving is not allowed for non-scalar types, and '?' // non-specific tag is only automatically assigned to plain scalars. // // We only need to check kind conformity in case user explicitly assigns '?' // tag, for example like this: "! [0]" // if (state.result !== null && state.kind !== 'scalar') { throwError(state, 'unacceptable node kind for ! tag; it should be "scalar", not "' + state.kind + '"'); } for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { type = state.implicitTypes[typeIndex]; if (type.resolve(state.result)) { // `state.result` updated in resolver if matched state.result = type.construct(state.result); state.tag = type.tag; if (state.anchor !== null) { state.anchorMap[state.anchor] = state.result; } break; } } } else if (state.tag !== '!') { if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { type = state.typeMap[state.kind || 'fallback'][state.tag]; } else { // looking for multi type type = null; typeList = state.typeMap.multi[state.kind || 'fallback']; for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { type = typeList[typeIndex]; break; } } } if (!type) { throwError(state, 'unknown tag !<' + state.tag + '>'); } if (state.result !== null && type.kind !== state.kind) { throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); } if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); } else { state.result = type.construct(state.result, state.tag); if (state.anchor !== null) { state.anchorMap[state.anchor] = state.result; } } } if (state.listener !== null) { state.listener('close', state); } return state.tag !== null || state.anchor !== null || hasContent; } function readDocument(state) { var documentStart = state.position, _position, directiveName, directiveArgs, hasDirectives = false, ch; state.version = null; state.checkLineBreaks = state.legacy; state.tagMap = Object.create(null); state.anchorMap = Object.create(null); while ((ch = state.input.charCodeAt(state.position)) !== 0) { skipSeparationSpace(state, true, -1); ch = state.input.charCodeAt(state.position); if (state.lineIndent > 0 || ch !== 0x25/* % */) { break; } hasDirectives = true; ch = state.input.charCodeAt(++state.position); _position = state.position; while (ch !== 0 && !is_WS_OR_EOL(ch)) { ch = state.input.charCodeAt(++state.position); } directiveName = state.input.slice(_position, state.position); directiveArgs = []; if (directiveName.length < 1) { throwError(state, 'directive name must not be less than one character in length'); } while (ch !== 0) { while (is_WHITE_SPACE(ch)) { ch = state.input.charCodeAt(++state.position); } if (ch === 0x23/* # */) { do { ch = state.input.charCodeAt(++state.position); } while (ch !== 0 && !is_EOL(ch)); break; } if (is_EOL(ch)) break; _position = state.position; while (ch !== 0 && !is_WS_OR_EOL(ch)) { ch = state.input.charCodeAt(++state.position); } directiveArgs.push(state.input.slice(_position, state.position)); } if (ch !== 0) readLineBreak(state); if (_hasOwnProperty.call(directiveHandlers, directiveName)) { directiveHandlers[directiveName](state, directiveName, directiveArgs); } else { throwWarning(state, 'unknown document directive "' + directiveName + '"'); } } skipSeparationSpace(state, true, -1); if (state.lineIndent === 0 && state.input.charCodeAt(state.position) === 0x2D/* - */ && state.input.charCodeAt(state.position + 1) === 0x2D/* - */ && state.input.charCodeAt(state.position + 2) === 0x2D/* - */) { state.position += 3; skipSeparationSpace(state, true, -1); } else if (hasDirectives) { throwError(state, 'directives end mark is expected'); } composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); skipSeparationSpace(state, true, -1); if (state.checkLineBreaks && PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { throwWarning(state, 'non-ASCII line breaks are interpreted as content'); } state.documents.push(state.result); if (state.position === state.lineStart && testDocumentSeparator(state)) { if (state.input.charCodeAt(state.position) === 0x2E/* . */) { state.position += 3; skipSeparationSpace(state, true, -1); } return; } if (state.position < (state.length - 1)) { throwError(state, 'end of the stream or a document separator is expected'); } else { return; } } function loadDocuments(input, options) { input = String(input); options = options || {}; if (input.length !== 0) { // Add tailing `\n` if not exists if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ && input.charCodeAt(input.length - 1) !== 0x0D/* CR */) { input += '\n'; } // Strip BOM if (input.charCodeAt(0) === 0xFEFF) { input = input.slice(1); } } var state = new State(input, options); var nullpos = input.indexOf('\0'); if (nullpos !== -1) { state.position = nullpos; throwError(state, 'null byte is not allowed in input'); } // Use 0 as string terminator. That significantly simplifies bounds check. state.input += '\0'; while (state.input.charCodeAt(state.position) === 0x20/* Space */) { state.lineIndent += 1; state.position += 1; } while (state.position < (state.length - 1)) { readDocument(state); } return state.documents; } function loadAll(input, iterator, options) { if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') { options = iterator; iterator = null; } var documents = loadDocuments(input, options); if (typeof iterator !== 'function') { return documents; } for (var index = 0, length = documents.length; index < length; index += 1) { iterator(documents[index]); } } function load(input, options) { var documents = loadDocuments(input, options); if (documents.length === 0) { /*eslint-disable no-undefined*/ return undefined; } else if (documents.length === 1) { return documents[0]; } throw new YAMLException('expected a single document in the stream, but found more'); } module.exports.loadAll = loadAll; module.exports.load = load; /***/ }), /***/ 1082: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*eslint-disable max-len*/ var YAMLException = __nccwpck_require__(8179); var Type = __nccwpck_require__(6073); function compileList(schema, name) { var result = []; schema[name].forEach(function (currentType) { var newIndex = result.length; result.forEach(function (previousType, previousIndex) { if (previousType.tag === currentType.tag && previousType.kind === currentType.kind && previousType.multi === currentType.multi) { newIndex = previousIndex; } }); result[newIndex] = currentType; }); return result; } function compileMap(/* lists... */) { var result = { scalar: {}, sequence: {}, mapping: {}, fallback: {}, multi: { scalar: [], sequence: [], mapping: [], fallback: [] } }, index, length; function collectType(type) { if (type.multi) { result.multi[type.kind].push(type); result.multi['fallback'].push(type); } else { result[type.kind][type.tag] = result['fallback'][type.tag] = type; } } for (index = 0, length = arguments.length; index < length; index += 1) { arguments[index].forEach(collectType); } return result; } function Schema(definition) { return this.extend(definition); } Schema.prototype.extend = function extend(definition) { var implicit = []; var explicit = []; if (definition instanceof Type) { // Schema.extend(type) explicit.push(definition); } else if (Array.isArray(definition)) { // Schema.extend([ type1, type2, ... ]) explicit = explicit.concat(definition); } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] }) if (definition.implicit) implicit = implicit.concat(definition.implicit); if (definition.explicit) explicit = explicit.concat(definition.explicit); } else { throw new YAMLException('Schema.extend argument should be a Type, [ Type ], ' + 'or a schema definition ({ implicit: [...], explicit: [...] })'); } implicit.forEach(function (type) { if (!(type instanceof Type)) { throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); } if (type.loadKind && type.loadKind !== 'scalar') { throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); } if (type.multi) { throw new YAMLException('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.'); } }); explicit.forEach(function (type) { if (!(type instanceof Type)) { throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); } }); var result = Object.create(Schema.prototype); result.implicit = (this.implicit || []).concat(implicit); result.explicit = (this.explicit || []).concat(explicit); result.compiledImplicit = compileList(result, 'implicit'); result.compiledExplicit = compileList(result, 'explicit'); result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); return result; }; module.exports = Schema; /***/ }), /***/ 2011: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Standard YAML's Core schema. // http://www.yaml.org/spec/1.2/spec.html#id2804923 // // NOTE: JS-YAML does not support schema-specific tag resolution restrictions. // So, Core schema has no distinctions from JSON schema is JS-YAML. module.exports = __nccwpck_require__(1035); /***/ }), /***/ 8759: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // JS-YAML's default schema for `safeLoad` function. // It is not described in the YAML specification. // // This schema is based on standard YAML's Core schema and includes most of // extra types described at YAML tag repository. (http://yaml.org/type/) module.exports = __nccwpck_require__(2011).extend({ implicit: [ __nccwpck_require__(9212), __nccwpck_require__(6104) ], explicit: [ __nccwpck_require__(7900), __nccwpck_require__(9046), __nccwpck_require__(6860), __nccwpck_require__(9548) ] }); /***/ }), /***/ 8562: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Standard YAML's Failsafe schema. // http://www.yaml.org/spec/1.2/spec.html#id2802346 var Schema = __nccwpck_require__(1082); module.exports = new Schema({ explicit: [ __nccwpck_require__(3619), __nccwpck_require__(7283), __nccwpck_require__(6150) ] }); /***/ }), /***/ 1035: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Standard YAML's JSON schema. // http://www.yaml.org/spec/1.2/spec.html#id2803231 // // NOTE: JS-YAML does not support schema-specific tag resolution restrictions. // So, this schema is not such strict as defined in the YAML specification. // It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. module.exports = __nccwpck_require__(8562).extend({ implicit: [ __nccwpck_require__(721), __nccwpck_require__(4993), __nccwpck_require__(1615), __nccwpck_require__(2705) ] }); /***/ }), /***/ 6975: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var common = __nccwpck_require__(6829); // get snippet for a single line, respecting maxLength function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { var head = ''; var tail = ''; var maxHalfLength = Math.floor(maxLineLength / 2) - 1; if (position - lineStart > maxHalfLength) { head = ' ... '; lineStart = position - maxHalfLength + head.length; } if (lineEnd - position > maxHalfLength) { tail = ' ...'; lineEnd = position + maxHalfLength - tail.length; } return { str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail, pos: position - lineStart + head.length // relative position }; } function padStart(string, max) { return common.repeat(' ', max - string.length) + string; } function makeSnippet(mark, options) { options = Object.create(options || null); if (!mark.buffer) return null; if (!options.maxLength) options.maxLength = 79; if (typeof options.indent !== 'number') options.indent = 1; if (typeof options.linesBefore !== 'number') options.linesBefore = 3; if (typeof options.linesAfter !== 'number') options.linesAfter = 2; var re = /\r?\n|\r|\0/g; var lineStarts = [ 0 ]; var lineEnds = []; var match; var foundLineNo = -1; while ((match = re.exec(mark.buffer))) { lineEnds.push(match.index); lineStarts.push(match.index + match[0].length); if (mark.position <= match.index && foundLineNo < 0) { foundLineNo = lineStarts.length - 2; } } if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; var result = '', i, line; var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); for (i = 1; i <= options.linesBefore; i++) { if (foundLineNo - i < 0) break; line = getLine( mark.buffer, lineStarts[foundLineNo - i], lineEnds[foundLineNo - i], mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), maxLineLength ); result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + ' | ' + line.str + '\n' + result; } line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + ' | ' + line.str + '\n'; result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n'; for (i = 1; i <= options.linesAfter; i++) { if (foundLineNo + i >= lineEnds.length) break; line = getLine( mark.buffer, lineStarts[foundLineNo + i], lineEnds[foundLineNo + i], mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), maxLineLength ); result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + ' | ' + line.str + '\n'; } return result.replace(/\n$/, ''); } module.exports = makeSnippet; /***/ }), /***/ 6073: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var YAMLException = __nccwpck_require__(8179); var TYPE_CONSTRUCTOR_OPTIONS = [ 'kind', 'multi', 'resolve', 'construct', 'instanceOf', 'predicate', 'represent', 'representName', 'defaultStyle', 'styleAliases' ]; var YAML_NODE_KINDS = [ 'scalar', 'sequence', 'mapping' ]; function compileStyleAliases(map) { var result = {}; if (map !== null) { Object.keys(map).forEach(function (style) { map[style].forEach(function (alias) { result[String(alias)] = style; }); }); } return result; } function Type(tag, options) { options = options || {}; Object.keys(options).forEach(function (name) { if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); } }); // TODO: Add tag format check. this.options = options; // keep original options in case user wants to extend this type later this.tag = tag; this.kind = options['kind'] || null; this.resolve = options['resolve'] || function () { return true; }; this.construct = options['construct'] || function (data) { return data; }; this.instanceOf = options['instanceOf'] || null; this.predicate = options['predicate'] || null; this.represent = options['represent'] || null; this.representName = options['representName'] || null; this.defaultStyle = options['defaultStyle'] || null; this.multi = options['multi'] || false; this.styleAliases = compileStyleAliases(options['styleAliases'] || null); if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); } } module.exports = Type; /***/ }), /***/ 7900: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*eslint-disable no-bitwise*/ var Type = __nccwpck_require__(6073); // [ 64, 65, 66 ] -> [ padding, CR, LF ] var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r'; function resolveYamlBinary(data) { if (data === null) return false; var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP; // Convert one by one. for (idx = 0; idx < max; idx++) { code = map.indexOf(data.charAt(idx)); // Skip CR/LF if (code > 64) continue; // Fail on illegal characters if (code < 0) return false; bitlen += 6; } // If there are any bits left, source was corrupted return (bitlen % 8) === 0; } function constructYamlBinary(data) { var idx, tailbits, input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan max = input.length, map = BASE64_MAP, bits = 0, result = []; // Collect by 6*4 bits (3 bytes) for (idx = 0; idx < max; idx++) { if ((idx % 4 === 0) && idx) { result.push((bits >> 16) & 0xFF); result.push((bits >> 8) & 0xFF); result.push(bits & 0xFF); } bits = (bits << 6) | map.indexOf(input.charAt(idx)); } // Dump tail tailbits = (max % 4) * 6; if (tailbits === 0) { result.push((bits >> 16) & 0xFF); result.push((bits >> 8) & 0xFF); result.push(bits & 0xFF); } else if (tailbits === 18) { result.push((bits >> 10) & 0xFF); result.push((bits >> 2) & 0xFF); } else if (tailbits === 12) { result.push((bits >> 4) & 0xFF); } return new Uint8Array(result); } function representYamlBinary(object /*, style*/) { var result = '', bits = 0, idx, tail, max = object.length, map = BASE64_MAP; // Convert every three bytes to 4 ASCII characters. for (idx = 0; idx < max; idx++) { if ((idx % 3 === 0) && idx) { result += map[(bits >> 18) & 0x3F]; result += map[(bits >> 12) & 0x3F]; result += map[(bits >> 6) & 0x3F]; result += map[bits & 0x3F]; } bits = (bits << 8) + object[idx]; } // Dump tail tail = max % 3; if (tail === 0) { result += map[(bits >> 18) & 0x3F]; result += map[(bits >> 12) & 0x3F]; result += map[(bits >> 6) & 0x3F]; result += map[bits & 0x3F]; } else if (tail === 2) { result += map[(bits >> 10) & 0x3F]; result += map[(bits >> 4) & 0x3F]; result += map[(bits << 2) & 0x3F]; result += map[64]; } else if (tail === 1) { result += map[(bits >> 2) & 0x3F]; result += map[(bits << 4) & 0x3F]; result += map[64]; result += map[64]; } return result; } function isBinary(obj) { return Object.prototype.toString.call(obj) === '[object Uint8Array]'; } module.exports = new Type('tag:yaml.org,2002:binary', { kind: 'scalar', resolve: resolveYamlBinary, construct: constructYamlBinary, predicate: isBinary, represent: representYamlBinary }); /***/ }), /***/ 4993: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); function resolveYamlBoolean(data) { if (data === null) return false; var max = data.length; return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) || (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE')); } function constructYamlBoolean(data) { return data === 'true' || data === 'True' || data === 'TRUE'; } function isBoolean(object) { return Object.prototype.toString.call(object) === '[object Boolean]'; } module.exports = new Type('tag:yaml.org,2002:bool', { kind: 'scalar', resolve: resolveYamlBoolean, construct: constructYamlBoolean, predicate: isBoolean, represent: { lowercase: function (object) { return object ? 'true' : 'false'; }, uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; }, camelcase: function (object) { return object ? 'True' : 'False'; } }, defaultStyle: 'lowercase' }); /***/ }), /***/ 2705: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var common = __nccwpck_require__(6829); var Type = __nccwpck_require__(6073); var YAML_FLOAT_PATTERN = new RegExp( // 2.5e4, 2.5 and integers '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + // .2e4, .2 // special case, seems not from spec '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + // .inf '|[-+]?\\.(?:inf|Inf|INF)' + // .nan '|\\.(?:nan|NaN|NAN))$'); function resolveYamlFloat(data) { if (data === null) return false; if (!YAML_FLOAT_PATTERN.test(data) || // Quick hack to not allow integers end with `_` // Probably should update regexp & check speed data[data.length - 1] === '_') { return false; } return true; } function constructYamlFloat(data) { var value, sign; value = data.replace(/_/g, '').toLowerCase(); sign = value[0] === '-' ? -1 : 1; if ('+-'.indexOf(value[0]) >= 0) { value = value.slice(1); } if (value === '.inf') { return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; } else if (value === '.nan') { return NaN; } return sign * parseFloat(value, 10); } var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; function representYamlFloat(object, style) { var res; if (isNaN(object)) { switch (style) { case 'lowercase': return '.nan'; case 'uppercase': return '.NAN'; case 'camelcase': return '.NaN'; } } else if (Number.POSITIVE_INFINITY === object) { switch (style) { case 'lowercase': return '.inf'; case 'uppercase': return '.INF'; case 'camelcase': return '.Inf'; } } else if (Number.NEGATIVE_INFINITY === object) { switch (style) { case 'lowercase': return '-.inf'; case 'uppercase': return '-.INF'; case 'camelcase': return '-.Inf'; } } else if (common.isNegativeZero(object)) { return '-0.0'; } res = object.toString(10); // JS stringifier can build scientific format without dots: 5e-100, // while YAML requres dot: 5.e-100. Fix it with simple hack return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res; } function isFloat(object) { return (Object.prototype.toString.call(object) === '[object Number]') && (object % 1 !== 0 || common.isNegativeZero(object)); } module.exports = new Type('tag:yaml.org,2002:float', { kind: 'scalar', resolve: resolveYamlFloat, construct: constructYamlFloat, predicate: isFloat, represent: representYamlFloat, defaultStyle: 'lowercase' }); /***/ }), /***/ 1615: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var common = __nccwpck_require__(6829); var Type = __nccwpck_require__(6073); function isHexCode(c) { return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || ((0x61/* a */ <= c) && (c <= 0x66/* f */)); } function isOctCode(c) { return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); } function isDecCode(c) { return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); } function resolveYamlInteger(data) { if (data === null) return false; var max = data.length, index = 0, hasDigits = false, ch; if (!max) return false; ch = data[index]; // sign if (ch === '-' || ch === '+') { ch = data[++index]; } if (ch === '0') { // 0 if (index + 1 === max) return true; ch = data[++index]; // base 2, base 8, base 16 if (ch === 'b') { // base 2 index++; for (; index < max; index++) { ch = data[index]; if (ch === '_') continue; if (ch !== '0' && ch !== '1') return false; hasDigits = true; } return hasDigits && ch !== '_'; } if (ch === 'x') { // base 16 index++; for (; index < max; index++) { ch = data[index]; if (ch === '_') continue; if (!isHexCode(data.charCodeAt(index))) return false; hasDigits = true; } return hasDigits && ch !== '_'; } if (ch === 'o') { // base 8 index++; for (; index < max; index++) { ch = data[index]; if (ch === '_') continue; if (!isOctCode(data.charCodeAt(index))) return false; hasDigits = true; } return hasDigits && ch !== '_'; } } // base 10 (except 0) // value should not start with `_`; if (ch === '_') return false; for (; index < max; index++) { ch = data[index]; if (ch === '_') continue; if (!isDecCode(data.charCodeAt(index))) { return false; } hasDigits = true; } // Should have digits and should not end with `_` if (!hasDigits || ch === '_') return false; return true; } function constructYamlInteger(data) { var value = data, sign = 1, ch; if (value.indexOf('_') !== -1) { value = value.replace(/_/g, ''); } ch = value[0]; if (ch === '-' || ch === '+') { if (ch === '-') sign = -1; value = value.slice(1); ch = value[0]; } if (value === '0') return 0; if (ch === '0') { if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); if (value[1] === 'x') return sign * parseInt(value.slice(2), 16); if (value[1] === 'o') return sign * parseInt(value.slice(2), 8); } return sign * parseInt(value, 10); } function isInteger(object) { return (Object.prototype.toString.call(object)) === '[object Number]' && (object % 1 === 0 && !common.isNegativeZero(object)); } module.exports = new Type('tag:yaml.org,2002:int', { kind: 'scalar', resolve: resolveYamlInteger, construct: constructYamlInteger, predicate: isInteger, represent: { binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); }, decimal: function (obj) { return obj.toString(10); }, /* eslint-disable max-len */ hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } }, defaultStyle: 'decimal', styleAliases: { binary: [ 2, 'bin' ], octal: [ 8, 'oct' ], decimal: [ 10, 'dec' ], hexadecimal: [ 16, 'hex' ] } }); /***/ }), /***/ 6150: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); module.exports = new Type('tag:yaml.org,2002:map', { kind: 'mapping', construct: function (data) { return data !== null ? data : {}; } }); /***/ }), /***/ 6104: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); function resolveYamlMerge(data) { return data === '<<' || data === null; } module.exports = new Type('tag:yaml.org,2002:merge', { kind: 'scalar', resolve: resolveYamlMerge }); /***/ }), /***/ 721: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); function resolveYamlNull(data) { if (data === null) return true; var max = data.length; return (max === 1 && data === '~') || (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL')); } function constructYamlNull() { return null; } function isNull(object) { return object === null; } module.exports = new Type('tag:yaml.org,2002:null', { kind: 'scalar', resolve: resolveYamlNull, construct: constructYamlNull, predicate: isNull, represent: { canonical: function () { return '~'; }, lowercase: function () { return 'null'; }, uppercase: function () { return 'NULL'; }, camelcase: function () { return 'Null'; }, empty: function () { return ''; } }, defaultStyle: 'lowercase' }); /***/ }), /***/ 9046: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); var _hasOwnProperty = Object.prototype.hasOwnProperty; var _toString = Object.prototype.toString; function resolveYamlOmap(data) { if (data === null) return true; var objectKeys = [], index, length, pair, pairKey, pairHasKey, object = data; for (index = 0, length = object.length; index < length; index += 1) { pair = object[index]; pairHasKey = false; if (_toString.call(pair) !== '[object Object]') return false; for (pairKey in pair) { if (_hasOwnProperty.call(pair, pairKey)) { if (!pairHasKey) pairHasKey = true; else return false; } } if (!pairHasKey) return false; if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); else return false; } return true; } function constructYamlOmap(data) { return data !== null ? data : []; } module.exports = new Type('tag:yaml.org,2002:omap', { kind: 'sequence', resolve: resolveYamlOmap, construct: constructYamlOmap }); /***/ }), /***/ 6860: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); var _toString = Object.prototype.toString; function resolveYamlPairs(data) { if (data === null) return true; var index, length, pair, keys, result, object = data; result = new Array(object.length); for (index = 0, length = object.length; index < length; index += 1) { pair = object[index]; if (_toString.call(pair) !== '[object Object]') return false; keys = Object.keys(pair); if (keys.length !== 1) return false; result[index] = [ keys[0], pair[keys[0]] ]; } return true; } function constructYamlPairs(data) { if (data === null) return []; var index, length, pair, keys, result, object = data; result = new Array(object.length); for (index = 0, length = object.length; index < length; index += 1) { pair = object[index]; keys = Object.keys(pair); result[index] = [ keys[0], pair[keys[0]] ]; } return result; } module.exports = new Type('tag:yaml.org,2002:pairs', { kind: 'sequence', resolve: resolveYamlPairs, construct: constructYamlPairs }); /***/ }), /***/ 7283: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); module.exports = new Type('tag:yaml.org,2002:seq', { kind: 'sequence', construct: function (data) { return data !== null ? data : []; } }); /***/ }), /***/ 9548: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); var _hasOwnProperty = Object.prototype.hasOwnProperty; function resolveYamlSet(data) { if (data === null) return true; var key, object = data; for (key in object) { if (_hasOwnProperty.call(object, key)) { if (object[key] !== null) return false; } } return true; } function constructYamlSet(data) { return data !== null ? data : {}; } module.exports = new Type('tag:yaml.org,2002:set', { kind: 'mapping', resolve: resolveYamlSet, construct: constructYamlSet }); /***/ }), /***/ 3619: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); module.exports = new Type('tag:yaml.org,2002:str', { kind: 'scalar', construct: function (data) { return data !== null ? data : ''; } }); /***/ }), /***/ 9212: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Type = __nccwpck_require__(6073); var YAML_DATE_REGEXP = new RegExp( '^([0-9][0-9][0-9][0-9])' + // [1] year '-([0-9][0-9])' + // [2] month '-([0-9][0-9])$'); // [3] day var YAML_TIMESTAMP_REGEXP = new RegExp( '^([0-9][0-9][0-9][0-9])' + // [1] year '-([0-9][0-9]?)' + // [2] month '-([0-9][0-9]?)' + // [3] day '(?:[Tt]|[ \\t]+)' + // ... '([0-9][0-9]?)' + // [4] hour ':([0-9][0-9])' + // [5] minute ':([0-9][0-9])' + // [6] second '(?:\\.([0-9]*))?' + // [7] fraction '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour '(?::([0-9][0-9]))?))?$'); // [11] tz_minute function resolveYamlTimestamp(data) { if (data === null) return false; if (YAML_DATE_REGEXP.exec(data) !== null) return true; if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; return false; } function constructYamlTimestamp(data) { var match, year, month, day, hour, minute, second, fraction = 0, delta = null, tz_hour, tz_minute, date; match = YAML_DATE_REGEXP.exec(data); if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data); if (match === null) throw new Error('Date resolve error'); // match: [1] year [2] month [3] day year = +(match[1]); month = +(match[2]) - 1; // JS month starts with 0 day = +(match[3]); if (!match[4]) { // no hour return new Date(Date.UTC(year, month, day)); } // match: [4] hour [5] minute [6] second [7] fraction hour = +(match[4]); minute = +(match[5]); second = +(match[6]); if (match[7]) { fraction = match[7].slice(0, 3); while (fraction.length < 3) { // milli-seconds fraction += '0'; } fraction = +fraction; } // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute if (match[9]) { tz_hour = +(match[10]); tz_minute = +(match[11] || 0); delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds if (match[9] === '-') delta = -delta; } date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); if (delta) date.setTime(date.getTime() - delta); return date; } function representYamlTimestamp(object /*, style*/) { return object.toISOString(); } module.exports = new Type('tag:yaml.org,2002:timestamp', { kind: 'scalar', resolve: resolveYamlTimestamp, construct: constructYamlTimestamp, instanceOf: Date, represent: representYamlTimestamp }); /***/ }), /***/ 6160: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { let _fs try { _fs = __nccwpck_require__(7758) } catch (_) { _fs = __nccwpck_require__(5747) } const universalify = __nccwpck_require__(1463) const { stringify, stripBom } = __nccwpck_require__(5902) async function _readFile (file, options = {}) { if (typeof options === 'string') { options = { encoding: options } } const fs = options.fs || _fs const shouldThrow = 'throws' in options ? options.throws : true let data = await universalify.fromCallback(fs.readFile)(file, options) data = stripBom(data) let obj try { obj = JSON.parse(data, options ? options.reviver : null) } catch (err) { if (shouldThrow) { err.message = `${file}: ${err.message}` throw err } else { return null } } return obj } const readFile = universalify.fromPromise(_readFile) function readFileSync (file, options = {}) { if (typeof options === 'string') { options = { encoding: options } } const fs = options.fs || _fs const shouldThrow = 'throws' in options ? options.throws : true try { let content = fs.readFileSync(file, options) content = stripBom(content) return JSON.parse(content, options.reviver) } catch (err) { if (shouldThrow) { err.message = `${file}: ${err.message}` throw err } else { return null } } } async function _writeFile (file, obj, options = {}) { const fs = options.fs || _fs const str = stringify(obj, options) await universalify.fromCallback(fs.writeFile)(file, str, options) } const writeFile = universalify.fromPromise(_writeFile) function writeFileSync (file, obj, options = {}) { const fs = options.fs || _fs const str = stringify(obj, options) // not sure if fs.writeFileSync returns anything, but just in case return fs.writeFileSync(file, str, options) } const jsonfile = { readFile, readFileSync, writeFile, writeFileSync } module.exports = jsonfile /***/ }), /***/ 5902: /***/ ((module) => { function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { const EOF = finalEOL ? EOL : '' const str = JSON.stringify(obj, replacer, spaces) return str.replace(/\n/g, EOL) + EOF } function stripBom (content) { // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified if (Buffer.isBuffer(content)) content = content.toString('utf8') return content.replace(/^\uFEFF/, '') } module.exports = { stringify, stripBom } /***/ }), /***/ 9197: /***/ ((module) => { /** * lodash (Custom Build) * Build: `lodash modularize exports="npm" -o ./` * Copyright jQuery Foundation and other contributors * Released under MIT license * Based on Underscore.js 1.8.3 * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors */ /** Used as the `TypeError` message for "Functions" methods. */ var FUNC_ERROR_TEXT = 'Expected a function'; /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0; /** `Object#toString` result references. */ var funcTag = '[object Function]', genTag = '[object GeneratorFunction]', symbolTag = '[object Symbol]'; /** Used to match property names within property paths. */ var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, reIsPlainProp = /^\w*$/, reLeadingDot = /^\./, rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; /** * Used to match `RegExp` * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). */ var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; /** Used to match backslashes in property paths. */ var reEscapeChar = /\\(\\)?/g; /** Used to detect host constructors (Safari). */ var reIsHostCtor = /^\[object .+?Constructor\]$/; /** Detect free variable `global` from Node.js. */ var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; /** Detect free variable `self`. */ var freeSelf = typeof self == 'object' && self && self.Object === Object && self; /** Used as a reference to the global object. */ var root = freeGlobal || freeSelf || Function('return this')(); /** * Gets the value at `key` of `object`. * * @private * @param {Object} [object] The object to query. * @param {string} key The key of the property to get. * @returns {*} Returns the property value. */ function getValue(object, key) { return object == null ? undefined : object[key]; } /** * Checks if `value` is a host object in IE < 9. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a host object, else `false`. */ function isHostObject(value) { // Many host objects are `Object` objects that can coerce to strings // despite having improperly defined `toString` methods. var result = false; if (value != null && typeof value.toString != 'function') { try { result = !!(value + ''); } catch (e) {} } return result; } /** Used for built-in method references. */ var arrayProto = Array.prototype, funcProto = Function.prototype, objectProto = Object.prototype; /** Used to detect overreaching core-js shims. */ var coreJsData = root['__core-js_shared__']; /** Used to detect methods masquerading as native. */ var maskSrcKey = (function() { var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); return uid ? ('Symbol(src)_1.' + uid) : ''; }()); /** Used to resolve the decompiled source of functions. */ var funcToString = funcProto.toString; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. */ var objectToString = objectProto.toString; /** Used to detect if a method is native. */ var reIsNative = RegExp('^' + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' ); /** Built-in value references. */ var Symbol = root.Symbol, splice = arrayProto.splice; /* Built-in method references that are verified to be native. */ var Map = getNative(root, 'Map'), nativeCreate = getNative(Object, 'create'); /** Used to convert symbols to primitives and strings. */ var symbolProto = Symbol ? Symbol.prototype : undefined, symbolToString = symbolProto ? symbolProto.toString : undefined; /** * Creates a hash object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function Hash(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the hash. * * @private * @name clear * @memberOf Hash */ function hashClear() { this.__data__ = nativeCreate ? nativeCreate(null) : {}; } /** * Removes `key` and its value from the hash. * * @private * @name delete * @memberOf Hash * @param {Object} hash The hash to modify. * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function hashDelete(key) { return this.has(key) && delete this.__data__[key]; } /** * Gets the hash value for `key`. * * @private * @name get * @memberOf Hash * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function hashGet(key) { var data = this.__data__; if (nativeCreate) { var result = data[key]; return result === HASH_UNDEFINED ? undefined : result; } return hasOwnProperty.call(data, key) ? data[key] : undefined; } /** * Checks if a hash value for `key` exists. * * @private * @name has * @memberOf Hash * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function hashHas(key) { var data = this.__data__; return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); } /** * Sets the hash `key` to `value`. * * @private * @name set * @memberOf Hash * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the hash instance. */ function hashSet(key, value) { var data = this.__data__; data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; return this; } // Add methods to `Hash`. Hash.prototype.clear = hashClear; Hash.prototype['delete'] = hashDelete; Hash.prototype.get = hashGet; Hash.prototype.has = hashHas; Hash.prototype.set = hashSet; /** * Creates an list cache object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function ListCache(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the list cache. * * @private * @name clear * @memberOf ListCache */ function listCacheClear() { this.__data__ = []; } /** * Removes `key` and its value from the list cache. * * @private * @name delete * @memberOf ListCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function listCacheDelete(key) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { return false; } var lastIndex = data.length - 1; if (index == lastIndex) { data.pop(); } else { splice.call(data, index, 1); } return true; } /** * Gets the list cache value for `key`. * * @private * @name get * @memberOf ListCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function listCacheGet(key) { var data = this.__data__, index = assocIndexOf(data, key); return index < 0 ? undefined : data[index][1]; } /** * Checks if a list cache value for `key` exists. * * @private * @name has * @memberOf ListCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function listCacheHas(key) { return assocIndexOf(this.__data__, key) > -1; } /** * Sets the list cache `key` to `value`. * * @private * @name set * @memberOf ListCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the list cache instance. */ function listCacheSet(key, value) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { data.push([key, value]); } else { data[index][1] = value; } return this; } // Add methods to `ListCache`. ListCache.prototype.clear = listCacheClear; ListCache.prototype['delete'] = listCacheDelete; ListCache.prototype.get = listCacheGet; ListCache.prototype.has = listCacheHas; ListCache.prototype.set = listCacheSet; /** * Creates a map cache object to store key-value pairs. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function MapCache(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the map. * * @private * @name clear * @memberOf MapCache */ function mapCacheClear() { this.__data__ = { 'hash': new Hash, 'map': new (Map || ListCache), 'string': new Hash }; } /** * Removes `key` and its value from the map. * * @private * @name delete * @memberOf MapCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function mapCacheDelete(key) { return getMapData(this, key)['delete'](key); } /** * Gets the map value for `key`. * * @private * @name get * @memberOf MapCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function mapCacheGet(key) { return getMapData(this, key).get(key); } /** * Checks if a map value for `key` exists. * * @private * @name has * @memberOf MapCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function mapCacheHas(key) { return getMapData(this, key).has(key); } /** * Sets the map `key` to `value`. * * @private * @name set * @memberOf MapCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the map cache instance. */ function mapCacheSet(key, value) { getMapData(this, key).set(key, value); return this; } // Add methods to `MapCache`. MapCache.prototype.clear = mapCacheClear; MapCache.prototype['delete'] = mapCacheDelete; MapCache.prototype.get = mapCacheGet; MapCache.prototype.has = mapCacheHas; MapCache.prototype.set = mapCacheSet; /** * Gets the index at which the `key` is found in `array` of key-value pairs. * * @private * @param {Array} array The array to inspect. * @param {*} key The key to search for. * @returns {number} Returns the index of the matched value, else `-1`. */ function assocIndexOf(array, key) { var length = array.length; while (length--) { if (eq(array[length][0], key)) { return length; } } return -1; } /** * The base implementation of `_.get` without support for default values. * * @private * @param {Object} object The object to query. * @param {Array|string} path The path of the property to get. * @returns {*} Returns the resolved value. */ function baseGet(object, path) { path = isKey(path, object) ? [path] : castPath(path); var index = 0, length = path.length; while (object != null && index < length) { object = object[toKey(path[index++])]; } return (index && index == length) ? object : undefined; } /** * The base implementation of `_.isNative` without bad shim checks. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a native function, * else `false`. */ function baseIsNative(value) { if (!isObject(value) || isMasked(value)) { return false; } var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; return pattern.test(toSource(value)); } /** * The base implementation of `_.toString` which doesn't convert nullish * values to empty strings. * * @private * @param {*} value The value to process. * @returns {string} Returns the string. */ function baseToString(value) { // Exit early for strings to avoid a performance hit in some environments. if (typeof value == 'string') { return value; } if (isSymbol(value)) { return symbolToString ? symbolToString.call(value) : ''; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } /** * Casts `value` to a path array if it's not one. * * @private * @param {*} value The value to inspect. * @returns {Array} Returns the cast property path array. */ function castPath(value) { return isArray(value) ? value : stringToPath(value); } /** * Gets the data for `map`. * * @private * @param {Object} map The map to query. * @param {string} key The reference key. * @returns {*} Returns the map data. */ function getMapData(map, key) { var data = map.__data__; return isKeyable(key) ? data[typeof key == 'string' ? 'string' : 'hash'] : data.map; } /** * Gets the native function at `key` of `object`. * * @private * @param {Object} object The object to query. * @param {string} key The key of the method to get. * @returns {*} Returns the function if it's native, else `undefined`. */ function getNative(object, key) { var value = getValue(object, key); return baseIsNative(value) ? value : undefined; } /** * Checks if `value` is a property name and not a property path. * * @private * @param {*} value The value to check. * @param {Object} [object] The object to query keys on. * @returns {boolean} Returns `true` if `value` is a property name, else `false`. */ function isKey(value, object) { if (isArray(value)) { return false; } var type = typeof value; if (type == 'number' || type == 'symbol' || type == 'boolean' || value == null || isSymbol(value)) { return true; } return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || (object != null && value in Object(object)); } /** * Checks if `value` is suitable for use as unique object key. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is suitable, else `false`. */ function isKeyable(value) { var type = typeof value; return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') ? (value !== '__proto__') : (value === null); } /** * Checks if `func` has its source masked. * * @private * @param {Function} func The function to check. * @returns {boolean} Returns `true` if `func` is masked, else `false`. */ function isMasked(func) { return !!maskSrcKey && (maskSrcKey in func); } /** * Converts `string` to a property path array. * * @private * @param {string} string The string to convert. * @returns {Array} Returns the property path array. */ var stringToPath = memoize(function(string) { string = toString(string); var result = []; if (reLeadingDot.test(string)) { result.push(''); } string.replace(rePropName, function(match, number, quote, string) { result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); }); return result; }); /** * Converts `value` to a string key if it's not a string or symbol. * * @private * @param {*} value The value to inspect. * @returns {string|symbol} Returns the key. */ function toKey(value) { if (typeof value == 'string' || isSymbol(value)) { return value; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } /** * Converts `func` to its source code. * * @private * @param {Function} func The function to process. * @returns {string} Returns the source code. */ function toSource(func) { if (func != null) { try { return funcToString.call(func); } catch (e) {} try { return (func + ''); } catch (e) {} } return ''; } /** * Creates a function that memoizes the result of `func`. If `resolver` is * provided, it determines the cache key for storing the result based on the * arguments provided to the memoized function. By default, the first argument * provided to the memoized function is used as the map cache key. The `func` * is invoked with the `this` binding of the memoized function. * * **Note:** The cache is exposed as the `cache` property on the memoized * function. Its creation may be customized by replacing the `_.memoize.Cache` * constructor with one whose instances implement the * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) * method interface of `delete`, `get`, `has`, and `set`. * * @static * @memberOf _ * @since 0.1.0 * @category Function * @param {Function} func The function to have its output memoized. * @param {Function} [resolver] The function to resolve the cache key. * @returns {Function} Returns the new memoized function. * @example * * var object = { 'a': 1, 'b': 2 }; * var other = { 'c': 3, 'd': 4 }; * * var values = _.memoize(_.values); * values(object); * // => [1, 2] * * values(other); * // => [3, 4] * * object.a = 2; * values(object); * // => [1, 2] * * // Modify the result cache. * values.cache.set(object, ['a', 'b']); * values(object); * // => ['a', 'b'] * * // Replace `_.memoize.Cache`. * _.memoize.Cache = WeakMap; */ function memoize(func, resolver) { if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { throw new TypeError(FUNC_ERROR_TEXT); } var memoized = function() { var args = arguments, key = resolver ? resolver.apply(this, args) : args[0], cache = memoized.cache; if (cache.has(key)) { return cache.get(key); } var result = func.apply(this, args); memoized.cache = cache.set(key, result); return result; }; memoized.cache = new (memoize.Cache || MapCache); return memoized; } // Assign cache to `_.memoize`. memoize.Cache = MapCache; /** * Performs a * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) * comparison between two values to determine if they are equivalent. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to compare. * @param {*} other The other value to compare. * @returns {boolean} Returns `true` if the values are equivalent, else `false`. * @example * * var object = { 'a': 1 }; * var other = { 'a': 1 }; * * _.eq(object, object); * // => true * * _.eq(object, other); * // => false * * _.eq('a', 'a'); * // => true * * _.eq('a', Object('a')); * // => false * * _.eq(NaN, NaN); * // => true */ function eq(value, other) { return value === other || (value !== value && other !== other); } /** * Checks if `value` is classified as an `Array` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an array, else `false`. * @example * * _.isArray([1, 2, 3]); * // => true * * _.isArray(document.body.children); * // => false * * _.isArray('abc'); * // => false * * _.isArray(_.noop); * // => false */ var isArray = Array.isArray; /** * Checks if `value` is classified as a `Function` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true * * _.isFunction(/abc/); * // => false */ function isFunction(value) { // The use of `Object#toString` avoids issues with the `typeof` operator // in Safari 8-9 which returns 'object' for typed array and other constructors. var tag = isObject(value) ? objectToString.call(value) : ''; return tag == funcTag || tag == genTag; } /** * Checks if `value` is the * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(_.noop); * // => true * * _.isObject(null); * // => false */ function isObject(value) { var type = typeof value; return !!value && (type == 'object' || type == 'function'); } /** * Checks if `value` is object-like. A value is object-like if it's not `null` * and has a `typeof` result of "object". * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is object-like, else `false`. * @example * * _.isObjectLike({}); * // => true * * _.isObjectLike([1, 2, 3]); * // => true * * _.isObjectLike(_.noop); * // => false * * _.isObjectLike(null); * // => false */ function isObjectLike(value) { return !!value && typeof value == 'object'; } /** * Checks if `value` is classified as a `Symbol` primitive or object. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. * @example * * _.isSymbol(Symbol.iterator); * // => true * * _.isSymbol('abc'); * // => false */ function isSymbol(value) { return typeof value == 'symbol' || (isObjectLike(value) && objectToString.call(value) == symbolTag); } /** * Converts `value` to a string. An empty string is returned for `null` * and `undefined` values. The sign of `-0` is preserved. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to process. * @returns {string} Returns the string. * @example * * _.toString(null); * // => '' * * _.toString(-0); * // => '-0' * * _.toString([1, 2, 3]); * // => '1,2,3' */ function toString(value) { return value == null ? '' : baseToString(value); } /** * Gets the value at `path` of `object`. If the resolved value is * `undefined`, the `defaultValue` is returned in its place. * * @static * @memberOf _ * @since 3.7.0 * @category Object * @param {Object} object The object to query. * @param {Array|string} path The path of the property to get. * @param {*} [defaultValue] The value returned for `undefined` resolved values. * @returns {*} Returns the resolved value. * @example * * var object = { 'a': [{ 'b': { 'c': 3 } }] }; * * _.get(object, 'a[0].b.c'); * // => 3 * * _.get(object, ['a', '0', 'b', 'c']); * // => 3 * * _.get(object, 'a.b.c', 'default'); * // => 'default' */ function get(object, path, defaultValue) { var result = object == null ? undefined : baseGet(object, path); return result === undefined ? defaultValue : result; } module.exports = get; /***/ }), /***/ 1552: /***/ ((module) => { /** * lodash (Custom Build) * Build: `lodash modularize exports="npm" -o ./` * Copyright jQuery Foundation and other contributors * Released under MIT license * Based on Underscore.js 1.8.3 * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors */ /** Used as the `TypeError` message for "Functions" methods. */ var FUNC_ERROR_TEXT = 'Expected a function'; /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0, MAX_SAFE_INTEGER = 9007199254740991; /** `Object#toString` result references. */ var funcTag = '[object Function]', genTag = '[object GeneratorFunction]', symbolTag = '[object Symbol]'; /** Used to match property names within property paths. */ var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, reIsPlainProp = /^\w*$/, reLeadingDot = /^\./, rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; /** * Used to match `RegExp` * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). */ var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; /** Used to match backslashes in property paths. */ var reEscapeChar = /\\(\\)?/g; /** Used to detect host constructors (Safari). */ var reIsHostCtor = /^\[object .+?Constructor\]$/; /** Used to detect unsigned integer values. */ var reIsUint = /^(?:0|[1-9]\d*)$/; /** Detect free variable `global` from Node.js. */ var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; /** Detect free variable `self`. */ var freeSelf = typeof self == 'object' && self && self.Object === Object && self; /** Used as a reference to the global object. */ var root = freeGlobal || freeSelf || Function('return this')(); /** * Gets the value at `key` of `object`. * * @private * @param {Object} [object] The object to query. * @param {string} key The key of the property to get. * @returns {*} Returns the property value. */ function getValue(object, key) { return object == null ? undefined : object[key]; } /** * Checks if `value` is a host object in IE < 9. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a host object, else `false`. */ function isHostObject(value) { // Many host objects are `Object` objects that can coerce to strings // despite having improperly defined `toString` methods. var result = false; if (value != null && typeof value.toString != 'function') { try { result = !!(value + ''); } catch (e) {} } return result; } /** Used for built-in method references. */ var arrayProto = Array.prototype, funcProto = Function.prototype, objectProto = Object.prototype; /** Used to detect overreaching core-js shims. */ var coreJsData = root['__core-js_shared__']; /** Used to detect methods masquerading as native. */ var maskSrcKey = (function() { var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); return uid ? ('Symbol(src)_1.' + uid) : ''; }()); /** Used to resolve the decompiled source of functions. */ var funcToString = funcProto.toString; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. */ var objectToString = objectProto.toString; /** Used to detect if a method is native. */ var reIsNative = RegExp('^' + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' ); /** Built-in value references. */ var Symbol = root.Symbol, splice = arrayProto.splice; /* Built-in method references that are verified to be native. */ var Map = getNative(root, 'Map'), nativeCreate = getNative(Object, 'create'); /** Used to convert symbols to primitives and strings. */ var symbolProto = Symbol ? Symbol.prototype : undefined, symbolToString = symbolProto ? symbolProto.toString : undefined; /** * Creates a hash object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function Hash(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the hash. * * @private * @name clear * @memberOf Hash */ function hashClear() { this.__data__ = nativeCreate ? nativeCreate(null) : {}; } /** * Removes `key` and its value from the hash. * * @private * @name delete * @memberOf Hash * @param {Object} hash The hash to modify. * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function hashDelete(key) { return this.has(key) && delete this.__data__[key]; } /** * Gets the hash value for `key`. * * @private * @name get * @memberOf Hash * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function hashGet(key) { var data = this.__data__; if (nativeCreate) { var result = data[key]; return result === HASH_UNDEFINED ? undefined : result; } return hasOwnProperty.call(data, key) ? data[key] : undefined; } /** * Checks if a hash value for `key` exists. * * @private * @name has * @memberOf Hash * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function hashHas(key) { var data = this.__data__; return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); } /** * Sets the hash `key` to `value`. * * @private * @name set * @memberOf Hash * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the hash instance. */ function hashSet(key, value) { var data = this.__data__; data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; return this; } // Add methods to `Hash`. Hash.prototype.clear = hashClear; Hash.prototype['delete'] = hashDelete; Hash.prototype.get = hashGet; Hash.prototype.has = hashHas; Hash.prototype.set = hashSet; /** * Creates an list cache object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function ListCache(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the list cache. * * @private * @name clear * @memberOf ListCache */ function listCacheClear() { this.__data__ = []; } /** * Removes `key` and its value from the list cache. * * @private * @name delete * @memberOf ListCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function listCacheDelete(key) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { return false; } var lastIndex = data.length - 1; if (index == lastIndex) { data.pop(); } else { splice.call(data, index, 1); } return true; } /** * Gets the list cache value for `key`. * * @private * @name get * @memberOf ListCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function listCacheGet(key) { var data = this.__data__, index = assocIndexOf(data, key); return index < 0 ? undefined : data[index][1]; } /** * Checks if a list cache value for `key` exists. * * @private * @name has * @memberOf ListCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function listCacheHas(key) { return assocIndexOf(this.__data__, key) > -1; } /** * Sets the list cache `key` to `value`. * * @private * @name set * @memberOf ListCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the list cache instance. */ function listCacheSet(key, value) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { data.push([key, value]); } else { data[index][1] = value; } return this; } // Add methods to `ListCache`. ListCache.prototype.clear = listCacheClear; ListCache.prototype['delete'] = listCacheDelete; ListCache.prototype.get = listCacheGet; ListCache.prototype.has = listCacheHas; ListCache.prototype.set = listCacheSet; /** * Creates a map cache object to store key-value pairs. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function MapCache(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the map. * * @private * @name clear * @memberOf MapCache */ function mapCacheClear() { this.__data__ = { 'hash': new Hash, 'map': new (Map || ListCache), 'string': new Hash }; } /** * Removes `key` and its value from the map. * * @private * @name delete * @memberOf MapCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function mapCacheDelete(key) { return getMapData(this, key)['delete'](key); } /** * Gets the map value for `key`. * * @private * @name get * @memberOf MapCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function mapCacheGet(key) { return getMapData(this, key).get(key); } /** * Checks if a map value for `key` exists. * * @private * @name has * @memberOf MapCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function mapCacheHas(key) { return getMapData(this, key).has(key); } /** * Sets the map `key` to `value`. * * @private * @name set * @memberOf MapCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the map cache instance. */ function mapCacheSet(key, value) { getMapData(this, key).set(key, value); return this; } // Add methods to `MapCache`. MapCache.prototype.clear = mapCacheClear; MapCache.prototype['delete'] = mapCacheDelete; MapCache.prototype.get = mapCacheGet; MapCache.prototype.has = mapCacheHas; MapCache.prototype.set = mapCacheSet; /** * Assigns `value` to `key` of `object` if the existing value is not equivalent * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) * for equality comparisons. * * @private * @param {Object} object The object to modify. * @param {string} key The key of the property to assign. * @param {*} value The value to assign. */ function assignValue(object, key, value) { var objValue = object[key]; if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || (value === undefined && !(key in object))) { object[key] = value; } } /** * Gets the index at which the `key` is found in `array` of key-value pairs. * * @private * @param {Array} array The array to inspect. * @param {*} key The key to search for. * @returns {number} Returns the index of the matched value, else `-1`. */ function assocIndexOf(array, key) { var length = array.length; while (length--) { if (eq(array[length][0], key)) { return length; } } return -1; } /** * The base implementation of `_.isNative` without bad shim checks. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a native function, * else `false`. */ function baseIsNative(value) { if (!isObject(value) || isMasked(value)) { return false; } var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; return pattern.test(toSource(value)); } /** * The base implementation of `_.set`. * * @private * @param {Object} object The object to modify. * @param {Array|string} path The path of the property to set. * @param {*} value The value to set. * @param {Function} [customizer] The function to customize path creation. * @returns {Object} Returns `object`. */ function baseSet(object, path, value, customizer) { if (!isObject(object)) { return object; } path = isKey(path, object) ? [path] : castPath(path); var index = -1, length = path.length, lastIndex = length - 1, nested = object; while (nested != null && ++index < length) { var key = toKey(path[index]), newValue = value; if (index != lastIndex) { var objValue = nested[key]; newValue = customizer ? customizer(objValue, key, nested) : undefined; if (newValue === undefined) { newValue = isObject(objValue) ? objValue : (isIndex(path[index + 1]) ? [] : {}); } } assignValue(nested, key, newValue); nested = nested[key]; } return object; } /** * The base implementation of `_.toString` which doesn't convert nullish * values to empty strings. * * @private * @param {*} value The value to process. * @returns {string} Returns the string. */ function baseToString(value) { // Exit early for strings to avoid a performance hit in some environments. if (typeof value == 'string') { return value; } if (isSymbol(value)) { return symbolToString ? symbolToString.call(value) : ''; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } /** * Casts `value` to a path array if it's not one. * * @private * @param {*} value The value to inspect. * @returns {Array} Returns the cast property path array. */ function castPath(value) { return isArray(value) ? value : stringToPath(value); } /** * Gets the data for `map`. * * @private * @param {Object} map The map to query. * @param {string} key The reference key. * @returns {*} Returns the map data. */ function getMapData(map, key) { var data = map.__data__; return isKeyable(key) ? data[typeof key == 'string' ? 'string' : 'hash'] : data.map; } /** * Gets the native function at `key` of `object`. * * @private * @param {Object} object The object to query. * @param {string} key The key of the method to get. * @returns {*} Returns the function if it's native, else `undefined`. */ function getNative(object, key) { var value = getValue(object, key); return baseIsNative(value) ? value : undefined; } /** * Checks if `value` is a valid array-like index. * * @private * @param {*} value The value to check. * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. */ function isIndex(value, length) { length = length == null ? MAX_SAFE_INTEGER : length; return !!length && (typeof value == 'number' || reIsUint.test(value)) && (value > -1 && value % 1 == 0 && value < length); } /** * Checks if `value` is a property name and not a property path. * * @private * @param {*} value The value to check. * @param {Object} [object] The object to query keys on. * @returns {boolean} Returns `true` if `value` is a property name, else `false`. */ function isKey(value, object) { if (isArray(value)) { return false; } var type = typeof value; if (type == 'number' || type == 'symbol' || type == 'boolean' || value == null || isSymbol(value)) { return true; } return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || (object != null && value in Object(object)); } /** * Checks if `value` is suitable for use as unique object key. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is suitable, else `false`. */ function isKeyable(value) { var type = typeof value; return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') ? (value !== '__proto__') : (value === null); } /** * Checks if `func` has its source masked. * * @private * @param {Function} func The function to check. * @returns {boolean} Returns `true` if `func` is masked, else `false`. */ function isMasked(func) { return !!maskSrcKey && (maskSrcKey in func); } /** * Converts `string` to a property path array. * * @private * @param {string} string The string to convert. * @returns {Array} Returns the property path array. */ var stringToPath = memoize(function(string) { string = toString(string); var result = []; if (reLeadingDot.test(string)) { result.push(''); } string.replace(rePropName, function(match, number, quote, string) { result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); }); return result; }); /** * Converts `value` to a string key if it's not a string or symbol. * * @private * @param {*} value The value to inspect. * @returns {string|symbol} Returns the key. */ function toKey(value) { if (typeof value == 'string' || isSymbol(value)) { return value; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } /** * Converts `func` to its source code. * * @private * @param {Function} func The function to process. * @returns {string} Returns the source code. */ function toSource(func) { if (func != null) { try { return funcToString.call(func); } catch (e) {} try { return (func + ''); } catch (e) {} } return ''; } /** * Creates a function that memoizes the result of `func`. If `resolver` is * provided, it determines the cache key for storing the result based on the * arguments provided to the memoized function. By default, the first argument * provided to the memoized function is used as the map cache key. The `func` * is invoked with the `this` binding of the memoized function. * * **Note:** The cache is exposed as the `cache` property on the memoized * function. Its creation may be customized by replacing the `_.memoize.Cache` * constructor with one whose instances implement the * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) * method interface of `delete`, `get`, `has`, and `set`. * * @static * @memberOf _ * @since 0.1.0 * @category Function * @param {Function} func The function to have its output memoized. * @param {Function} [resolver] The function to resolve the cache key. * @returns {Function} Returns the new memoized function. * @example * * var object = { 'a': 1, 'b': 2 }; * var other = { 'c': 3, 'd': 4 }; * * var values = _.memoize(_.values); * values(object); * // => [1, 2] * * values(other); * // => [3, 4] * * object.a = 2; * values(object); * // => [1, 2] * * // Modify the result cache. * values.cache.set(object, ['a', 'b']); * values(object); * // => ['a', 'b'] * * // Replace `_.memoize.Cache`. * _.memoize.Cache = WeakMap; */ function memoize(func, resolver) { if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { throw new TypeError(FUNC_ERROR_TEXT); } var memoized = function() { var args = arguments, key = resolver ? resolver.apply(this, args) : args[0], cache = memoized.cache; if (cache.has(key)) { return cache.get(key); } var result = func.apply(this, args); memoized.cache = cache.set(key, result); return result; }; memoized.cache = new (memoize.Cache || MapCache); return memoized; } // Assign cache to `_.memoize`. memoize.Cache = MapCache; /** * Performs a * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) * comparison between two values to determine if they are equivalent. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to compare. * @param {*} other The other value to compare. * @returns {boolean} Returns `true` if the values are equivalent, else `false`. * @example * * var object = { 'a': 1 }; * var other = { 'a': 1 }; * * _.eq(object, object); * // => true * * _.eq(object, other); * // => false * * _.eq('a', 'a'); * // => true * * _.eq('a', Object('a')); * // => false * * _.eq(NaN, NaN); * // => true */ function eq(value, other) { return value === other || (value !== value && other !== other); } /** * Checks if `value` is classified as an `Array` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an array, else `false`. * @example * * _.isArray([1, 2, 3]); * // => true * * _.isArray(document.body.children); * // => false * * _.isArray('abc'); * // => false * * _.isArray(_.noop); * // => false */ var isArray = Array.isArray; /** * Checks if `value` is classified as a `Function` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true * * _.isFunction(/abc/); * // => false */ function isFunction(value) { // The use of `Object#toString` avoids issues with the `typeof` operator // in Safari 8-9 which returns 'object' for typed array and other constructors. var tag = isObject(value) ? objectToString.call(value) : ''; return tag == funcTag || tag == genTag; } /** * Checks if `value` is the * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(_.noop); * // => true * * _.isObject(null); * // => false */ function isObject(value) { var type = typeof value; return !!value && (type == 'object' || type == 'function'); } /** * Checks if `value` is object-like. A value is object-like if it's not `null` * and has a `typeof` result of "object". * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is object-like, else `false`. * @example * * _.isObjectLike({}); * // => true * * _.isObjectLike([1, 2, 3]); * // => true * * _.isObjectLike(_.noop); * // => false * * _.isObjectLike(null); * // => false */ function isObjectLike(value) { return !!value && typeof value == 'object'; } /** * Checks if `value` is classified as a `Symbol` primitive or object. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. * @example * * _.isSymbol(Symbol.iterator); * // => true * * _.isSymbol('abc'); * // => false */ function isSymbol(value) { return typeof value == 'symbol' || (isObjectLike(value) && objectToString.call(value) == symbolTag); } /** * Converts `value` to a string. An empty string is returned for `null` * and `undefined` values. The sign of `-0` is preserved. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to process. * @returns {string} Returns the string. * @example * * _.toString(null); * // => '' * * _.toString(-0); * // => '-0' * * _.toString([1, 2, 3]); * // => '1,2,3' */ function toString(value) { return value == null ? '' : baseToString(value); } /** * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, * it's created. Arrays are created for missing index properties while objects * are created for all other missing properties. Use `_.setWith` to customize * `path` creation. * * **Note:** This method mutates `object`. * * @static * @memberOf _ * @since 3.7.0 * @category Object * @param {Object} object The object to modify. * @param {Array|string} path The path of the property to set. * @param {*} value The value to set. * @returns {Object} Returns `object`. * @example * * var object = { 'a': [{ 'b': { 'c': 3 } }] }; * * _.set(object, 'a[0].b.c', 4); * console.log(object.a[0].b.c); * // => 4 * * _.set(object, ['x', '0', 'y', 'z'], 5); * console.log(object.x[0].y.z); * // => 5 */ function set(object, path, value) { return object == null ? object : baseSet(object, path, value); } module.exports = set; /***/ }), /***/ 8216: /***/ ((module) => { /** * lodash (Custom Build) * Build: `lodash modularize exports="npm" -o ./` * Copyright jQuery Foundation and other contributors * Released under MIT license * Based on Underscore.js 1.8.3 * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors */ /** Used as the size to enable large array optimizations. */ var LARGE_ARRAY_SIZE = 200; /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0; /** `Object#toString` result references. */ var funcTag = '[object Function]', genTag = '[object GeneratorFunction]'; /** * Used to match `RegExp` * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). */ var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; /** Used to detect host constructors (Safari). */ var reIsHostCtor = /^\[object .+?Constructor\]$/; /** Detect free variable `global` from Node.js. */ var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; /** Detect free variable `self`. */ var freeSelf = typeof self == 'object' && self && self.Object === Object && self; /** Used as a reference to the global object. */ var root = freeGlobal || freeSelf || Function('return this')(); /** * A specialized version of `_.includes` for arrays without support for * specifying an index to search from. * * @private * @param {Array} [array] The array to inspect. * @param {*} target The value to search for. * @returns {boolean} Returns `true` if `target` is found, else `false`. */ function arrayIncludes(array, value) { var length = array ? array.length : 0; return !!length && baseIndexOf(array, value, 0) > -1; } /** * This function is like `arrayIncludes` except that it accepts a comparator. * * @private * @param {Array} [array] The array to inspect. * @param {*} target The value to search for. * @param {Function} comparator The comparator invoked per element. * @returns {boolean} Returns `true` if `target` is found, else `false`. */ function arrayIncludesWith(array, value, comparator) { var index = -1, length = array ? array.length : 0; while (++index < length) { if (comparator(value, array[index])) { return true; } } return false; } /** * The base implementation of `_.findIndex` and `_.findLastIndex` without * support for iteratee shorthands. * * @private * @param {Array} array The array to inspect. * @param {Function} predicate The function invoked per iteration. * @param {number} fromIndex The index to search from. * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {number} Returns the index of the matched value, else `-1`. */ function baseFindIndex(array, predicate, fromIndex, fromRight) { var length = array.length, index = fromIndex + (fromRight ? 1 : -1); while ((fromRight ? index-- : ++index < length)) { if (predicate(array[index], index, array)) { return index; } } return -1; } /** * The base implementation of `_.indexOf` without `fromIndex` bounds checks. * * @private * @param {Array} array The array to inspect. * @param {*} value The value to search for. * @param {number} fromIndex The index to search from. * @returns {number} Returns the index of the matched value, else `-1`. */ function baseIndexOf(array, value, fromIndex) { if (value !== value) { return baseFindIndex(array, baseIsNaN, fromIndex); } var index = fromIndex - 1, length = array.length; while (++index < length) { if (array[index] === value) { return index; } } return -1; } /** * The base implementation of `_.isNaN` without support for number objects. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. */ function baseIsNaN(value) { return value !== value; } /** * Checks if a cache value for `key` exists. * * @private * @param {Object} cache The cache to query. * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function cacheHas(cache, key) { return cache.has(key); } /** * Gets the value at `key` of `object`. * * @private * @param {Object} [object] The object to query. * @param {string} key The key of the property to get. * @returns {*} Returns the property value. */ function getValue(object, key) { return object == null ? undefined : object[key]; } /** * Checks if `value` is a host object in IE < 9. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a host object, else `false`. */ function isHostObject(value) { // Many host objects are `Object` objects that can coerce to strings // despite having improperly defined `toString` methods. var result = false; if (value != null && typeof value.toString != 'function') { try { result = !!(value + ''); } catch (e) {} } return result; } /** * Converts `set` to an array of its values. * * @private * @param {Object} set The set to convert. * @returns {Array} Returns the values. */ function setToArray(set) { var index = -1, result = Array(set.size); set.forEach(function(value) { result[++index] = value; }); return result; } /** Used for built-in method references. */ var arrayProto = Array.prototype, funcProto = Function.prototype, objectProto = Object.prototype; /** Used to detect overreaching core-js shims. */ var coreJsData = root['__core-js_shared__']; /** Used to detect methods masquerading as native. */ var maskSrcKey = (function() { var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); return uid ? ('Symbol(src)_1.' + uid) : ''; }()); /** Used to resolve the decompiled source of functions. */ var funcToString = funcProto.toString; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. */ var objectToString = objectProto.toString; /** Used to detect if a method is native. */ var reIsNative = RegExp('^' + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' ); /** Built-in value references. */ var splice = arrayProto.splice; /* Built-in method references that are verified to be native. */ var Map = getNative(root, 'Map'), Set = getNative(root, 'Set'), nativeCreate = getNative(Object, 'create'); /** * Creates a hash object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function Hash(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the hash. * * @private * @name clear * @memberOf Hash */ function hashClear() { this.__data__ = nativeCreate ? nativeCreate(null) : {}; } /** * Removes `key` and its value from the hash. * * @private * @name delete * @memberOf Hash * @param {Object} hash The hash to modify. * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function hashDelete(key) { return this.has(key) && delete this.__data__[key]; } /** * Gets the hash value for `key`. * * @private * @name get * @memberOf Hash * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function hashGet(key) { var data = this.__data__; if (nativeCreate) { var result = data[key]; return result === HASH_UNDEFINED ? undefined : result; } return hasOwnProperty.call(data, key) ? data[key] : undefined; } /** * Checks if a hash value for `key` exists. * * @private * @name has * @memberOf Hash * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function hashHas(key) { var data = this.__data__; return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); } /** * Sets the hash `key` to `value`. * * @private * @name set * @memberOf Hash * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the hash instance. */ function hashSet(key, value) { var data = this.__data__; data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; return this; } // Add methods to `Hash`. Hash.prototype.clear = hashClear; Hash.prototype['delete'] = hashDelete; Hash.prototype.get = hashGet; Hash.prototype.has = hashHas; Hash.prototype.set = hashSet; /** * Creates an list cache object. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function ListCache(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the list cache. * * @private * @name clear * @memberOf ListCache */ function listCacheClear() { this.__data__ = []; } /** * Removes `key` and its value from the list cache. * * @private * @name delete * @memberOf ListCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function listCacheDelete(key) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { return false; } var lastIndex = data.length - 1; if (index == lastIndex) { data.pop(); } else { splice.call(data, index, 1); } return true; } /** * Gets the list cache value for `key`. * * @private * @name get * @memberOf ListCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function listCacheGet(key) { var data = this.__data__, index = assocIndexOf(data, key); return index < 0 ? undefined : data[index][1]; } /** * Checks if a list cache value for `key` exists. * * @private * @name has * @memberOf ListCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function listCacheHas(key) { return assocIndexOf(this.__data__, key) > -1; } /** * Sets the list cache `key` to `value`. * * @private * @name set * @memberOf ListCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the list cache instance. */ function listCacheSet(key, value) { var data = this.__data__, index = assocIndexOf(data, key); if (index < 0) { data.push([key, value]); } else { data[index][1] = value; } return this; } // Add methods to `ListCache`. ListCache.prototype.clear = listCacheClear; ListCache.prototype['delete'] = listCacheDelete; ListCache.prototype.get = listCacheGet; ListCache.prototype.has = listCacheHas; ListCache.prototype.set = listCacheSet; /** * Creates a map cache object to store key-value pairs. * * @private * @constructor * @param {Array} [entries] The key-value pairs to cache. */ function MapCache(entries) { var index = -1, length = entries ? entries.length : 0; this.clear(); while (++index < length) { var entry = entries[index]; this.set(entry[0], entry[1]); } } /** * Removes all key-value entries from the map. * * @private * @name clear * @memberOf MapCache */ function mapCacheClear() { this.__data__ = { 'hash': new Hash, 'map': new (Map || ListCache), 'string': new Hash }; } /** * Removes `key` and its value from the map. * * @private * @name delete * @memberOf MapCache * @param {string} key The key of the value to remove. * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ function mapCacheDelete(key) { return getMapData(this, key)['delete'](key); } /** * Gets the map value for `key`. * * @private * @name get * @memberOf MapCache * @param {string} key The key of the value to get. * @returns {*} Returns the entry value. */ function mapCacheGet(key) { return getMapData(this, key).get(key); } /** * Checks if a map value for `key` exists. * * @private * @name has * @memberOf MapCache * @param {string} key The key of the entry to check. * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ function mapCacheHas(key) { return getMapData(this, key).has(key); } /** * Sets the map `key` to `value`. * * @private * @name set * @memberOf MapCache * @param {string} key The key of the value to set. * @param {*} value The value to set. * @returns {Object} Returns the map cache instance. */ function mapCacheSet(key, value) { getMapData(this, key).set(key, value); return this; } // Add methods to `MapCache`. MapCache.prototype.clear = mapCacheClear; MapCache.prototype['delete'] = mapCacheDelete; MapCache.prototype.get = mapCacheGet; MapCache.prototype.has = mapCacheHas; MapCache.prototype.set = mapCacheSet; /** * * Creates an array cache object to store unique values. * * @private * @constructor * @param {Array} [values] The values to cache. */ function SetCache(values) { var index = -1, length = values ? values.length : 0; this.__data__ = new MapCache; while (++index < length) { this.add(values[index]); } } /** * Adds `value` to the array cache. * * @private * @name add * @memberOf SetCache * @alias push * @param {*} value The value to cache. * @returns {Object} Returns the cache instance. */ function setCacheAdd(value) { this.__data__.set(value, HASH_UNDEFINED); return this; } /** * Checks if `value` is in the array cache. * * @private * @name has * @memberOf SetCache * @param {*} value The value to search for. * @returns {number} Returns `true` if `value` is found, else `false`. */ function setCacheHas(value) { return this.__data__.has(value); } // Add methods to `SetCache`. SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; SetCache.prototype.has = setCacheHas; /** * Gets the index at which the `key` is found in `array` of key-value pairs. * * @private * @param {Array} array The array to inspect. * @param {*} key The key to search for. * @returns {number} Returns the index of the matched value, else `-1`. */ function assocIndexOf(array, key) { var length = array.length; while (length--) { if (eq(array[length][0], key)) { return length; } } return -1; } /** * The base implementation of `_.isNative` without bad shim checks. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a native function, * else `false`. */ function baseIsNative(value) { if (!isObject(value) || isMasked(value)) { return false; } var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; return pattern.test(toSource(value)); } /** * The base implementation of `_.uniqBy` without support for iteratee shorthands. * * @private * @param {Array} array The array to inspect. * @param {Function} [iteratee] The iteratee invoked per element. * @param {Function} [comparator] The comparator invoked per element. * @returns {Array} Returns the new duplicate free array. */ function baseUniq(array, iteratee, comparator) { var index = -1, includes = arrayIncludes, length = array.length, isCommon = true, result = [], seen = result; if (comparator) { isCommon = false; includes = arrayIncludesWith; } else if (length >= LARGE_ARRAY_SIZE) { var set = iteratee ? null : createSet(array); if (set) { return setToArray(set); } isCommon = false; includes = cacheHas; seen = new SetCache; } else { seen = iteratee ? [] : result; } outer: while (++index < length) { var value = array[index], computed = iteratee ? iteratee(value) : value; value = (comparator || value !== 0) ? value : 0; if (isCommon && computed === computed) { var seenIndex = seen.length; while (seenIndex--) { if (seen[seenIndex] === computed) { continue outer; } } if (iteratee) { seen.push(computed); } result.push(value); } else if (!includes(seen, computed, comparator)) { if (seen !== result) { seen.push(computed); } result.push(value); } } return result; } /** * Creates a set object of `values`. * * @private * @param {Array} values The values to add to the set. * @returns {Object} Returns the new set. */ var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { return new Set(values); }; /** * Gets the data for `map`. * * @private * @param {Object} map The map to query. * @param {string} key The reference key. * @returns {*} Returns the map data. */ function getMapData(map, key) { var data = map.__data__; return isKeyable(key) ? data[typeof key == 'string' ? 'string' : 'hash'] : data.map; } /** * Gets the native function at `key` of `object`. * * @private * @param {Object} object The object to query. * @param {string} key The key of the method to get. * @returns {*} Returns the function if it's native, else `undefined`. */ function getNative(object, key) { var value = getValue(object, key); return baseIsNative(value) ? value : undefined; } /** * Checks if `value` is suitable for use as unique object key. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is suitable, else `false`. */ function isKeyable(value) { var type = typeof value; return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') ? (value !== '__proto__') : (value === null); } /** * Checks if `func` has its source masked. * * @private * @param {Function} func The function to check. * @returns {boolean} Returns `true` if `func` is masked, else `false`. */ function isMasked(func) { return !!maskSrcKey && (maskSrcKey in func); } /** * Converts `func` to its source code. * * @private * @param {Function} func The function to process. * @returns {string} Returns the source code. */ function toSource(func) { if (func != null) { try { return funcToString.call(func); } catch (e) {} try { return (func + ''); } catch (e) {} } return ''; } /** * Creates a duplicate-free version of an array, using * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) * for equality comparisons, in which only the first occurrence of each * element is kept. * * @static * @memberOf _ * @since 0.1.0 * @category Array * @param {Array} array The array to inspect. * @returns {Array} Returns the new duplicate free array. * @example * * _.uniq([2, 1, 2]); * // => [2, 1] */ function uniq(array) { return (array && array.length) ? baseUniq(array) : []; } /** * Performs a * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) * comparison between two values to determine if they are equivalent. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to compare. * @param {*} other The other value to compare. * @returns {boolean} Returns `true` if the values are equivalent, else `false`. * @example * * var object = { 'a': 1 }; * var other = { 'a': 1 }; * * _.eq(object, object); * // => true * * _.eq(object, other); * // => false * * _.eq('a', 'a'); * // => true * * _.eq('a', Object('a')); * // => false * * _.eq(NaN, NaN); * // => true */ function eq(value, other) { return value === other || (value !== value && other !== other); } /** * Checks if `value` is classified as a `Function` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true * * _.isFunction(/abc/); * // => false */ function isFunction(value) { // The use of `Object#toString` avoids issues with the `typeof` operator // in Safari 8-9 which returns 'object' for typed array and other constructors. var tag = isObject(value) ? objectToString.call(value) : ''; return tag == funcTag || tag == genTag; } /** * Checks if `value` is the * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(_.noop); * // => true * * _.isObject(null); * // => false */ function isObject(value) { var type = typeof value; return !!value && (type == 'object' || type == 'function'); } /** * This method returns `undefined`. * * @static * @memberOf _ * @since 2.3.0 * @category Util * @example * * _.times(2, _.noop); * // => [undefined, undefined] */ function noop() { // No operation performed. } module.exports = uniq; /***/ }), /***/ 7493: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const os = __nccwpck_require__(2087); const nameMap = new Map([ [20, ['Big Sur', '11']], [19, ['Catalina', '10.15']], [18, ['Mojave', '10.14']], [17, ['High Sierra', '10.13']], [16, ['Sierra', '10.12']], [15, ['El Capitan', '10.11']], [14, ['Yosemite', '10.10']], [13, ['Mavericks', '10.9']], [12, ['Mountain Lion', '10.8']], [11, ['Lion', '10.7']], [10, ['Snow Leopard', '10.6']], [9, ['Leopard', '10.5']], [8, ['Tiger', '10.4']], [7, ['Panther', '10.3']], [6, ['Jaguar', '10.2']], [5, ['Puma', '10.1']] ]); const macosRelease = release => { release = Number((release || os.release()).split('.')[0]); const [name, version] = nameMap.get(release); return { name, version }; }; module.exports = macosRelease; // TODO: remove this in the next major version module.exports.default = macosRelease; /***/ }), /***/ 8560: /***/ ((module) => { "use strict"; /** * Tries to execute a function and discards any error that occurs. * @param {Function} fn - Function that might or might not throw an error. * @returns {?*} Return-value of the function when no error occurred. */ module.exports = function(fn) { try { return fn() } catch (e) {} } /***/ }), /***/ 467: /***/ ((module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var Stream = _interopDefault(__nccwpck_require__(2413)); var http = _interopDefault(__nccwpck_require__(8605)); var Url = _interopDefault(__nccwpck_require__(8835)); var https = _interopDefault(__nccwpck_require__(7211)); var zlib = _interopDefault(__nccwpck_require__(8761)); // Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js // fix for "Readable" isn't a named export issue const Readable = Stream.Readable; const BUFFER = Symbol('buffer'); const TYPE = Symbol('type'); class Blob { constructor() { this[TYPE] = ''; const blobParts = arguments[0]; const options = arguments[1]; const buffers = []; let size = 0; if (blobParts) { const a = blobParts; const length = Number(a.length); for (let i = 0; i < length; i++) { const element = a[i]; let buffer; if (element instanceof Buffer) { buffer = element; } else if (ArrayBuffer.isView(element)) { buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); } else if (element instanceof ArrayBuffer) { buffer = Buffer.from(element); } else if (element instanceof Blob) { buffer = element[BUFFER]; } else { buffer = Buffer.from(typeof element === 'string' ? element : String(element)); } size += buffer.length; buffers.push(buffer); } } this[BUFFER] = Buffer.concat(buffers); let type = options && options.type !== undefined && String(options.type).toLowerCase(); if (type && !/[^\u0020-\u007E]/.test(type)) { this[TYPE] = type; } } get size() { return this[BUFFER].length; } get type() { return this[TYPE]; } text() { return Promise.resolve(this[BUFFER].toString()); } arrayBuffer() { const buf = this[BUFFER]; const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); return Promise.resolve(ab); } stream() { const readable = new Readable(); readable._read = function () {}; readable.push(this[BUFFER]); readable.push(null); return readable; } toString() { return '[object Blob]'; } slice() { const size = this.size; const start = arguments[0]; const end = arguments[1]; let relativeStart, relativeEnd; if (start === undefined) { relativeStart = 0; } else if (start < 0) { relativeStart = Math.max(size + start, 0); } else { relativeStart = Math.min(start, size); } if (end === undefined) { relativeEnd = size; } else if (end < 0) { relativeEnd = Math.max(size + end, 0); } else { relativeEnd = Math.min(end, size); } const span = Math.max(relativeEnd - relativeStart, 0); const buffer = this[BUFFER]; const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); const blob = new Blob([], { type: arguments[2] }); blob[BUFFER] = slicedBuffer; return blob; } } Object.defineProperties(Blob.prototype, { size: { enumerable: true }, type: { enumerable: true }, slice: { enumerable: true } }); Object.defineProperty(Blob.prototype, Symbol.toStringTag, { value: 'Blob', writable: false, enumerable: false, configurable: true }); /** * fetch-error.js * * FetchError interface for operational errors */ /** * Create FetchError instance * * @param String message Error message for human * @param String type Error type for machine * @param String systemError For Node.js system error * @return FetchError */ function FetchError(message, type, systemError) { Error.call(this, message); this.message = message; this.type = type; // when err.type is `system`, err.code contains system error code if (systemError) { this.code = this.errno = systemError.code; } // hide custom error implementation details from end-users Error.captureStackTrace(this, this.constructor); } FetchError.prototype = Object.create(Error.prototype); FetchError.prototype.constructor = FetchError; FetchError.prototype.name = 'FetchError'; let convert; try { convert = __nccwpck_require__(2877).convert; } catch (e) {} const INTERNALS = Symbol('Body internals'); // fix an issue where "PassThrough" isn't a named export for node <10 const PassThrough = Stream.PassThrough; /** * Body mixin * * Ref: https://fetch.spec.whatwg.org/#body * * @param Stream body Readable stream * @param Object opts Response options * @return Void */ function Body(body) { var _this = this; var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref$size = _ref.size; let size = _ref$size === undefined ? 0 : _ref$size; var _ref$timeout = _ref.timeout; let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; if (body == null) { // body is undefined or null body = null; } else if (isURLSearchParams(body)) { // body is a URLSearchParams body = Buffer.from(body.toString()); } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { // body is ArrayBuffer body = Buffer.from(body); } else if (ArrayBuffer.isView(body)) { // body is ArrayBufferView body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); } else if (body instanceof Stream) ; else { // none of the above // coerce to string then buffer body = Buffer.from(String(body)); } this[INTERNALS] = { body, disturbed: false, error: null }; this.size = size; this.timeout = timeout; if (body instanceof Stream) { body.on('error', function (err) { const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); _this[INTERNALS].error = error; }); } } Body.prototype = { get body() { return this[INTERNALS].body; }, get bodyUsed() { return this[INTERNALS].disturbed; }, /** * Decode response as ArrayBuffer * * @return Promise */ arrayBuffer() { return consumeBody.call(this).then(function (buf) { return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); }); }, /** * Return raw response as Blob * * @return Promise */ blob() { let ct = this.headers && this.headers.get('content-type') || ''; return consumeBody.call(this).then(function (buf) { return Object.assign( // Prevent copying new Blob([], { type: ct.toLowerCase() }), { [BUFFER]: buf }); }); }, /** * Decode response as json * * @return Promise */ json() { var _this2 = this; return consumeBody.call(this).then(function (buffer) { try { return JSON.parse(buffer.toString()); } catch (err) { return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); } }); }, /** * Decode response as text * * @return Promise */ text() { return consumeBody.call(this).then(function (buffer) { return buffer.toString(); }); }, /** * Decode response as buffer (non-spec api) * * @return Promise */ buffer() { return consumeBody.call(this); }, /** * Decode response as text, while automatically detecting the encoding and * trying to decode to UTF-8 (non-spec api) * * @return Promise */ textConverted() { var _this3 = this; return consumeBody.call(this).then(function (buffer) { return convertBody(buffer, _this3.headers); }); } }; // In browsers, all properties are enumerable. Object.defineProperties(Body.prototype, { body: { enumerable: true }, bodyUsed: { enumerable: true }, arrayBuffer: { enumerable: true }, blob: { enumerable: true }, json: { enumerable: true }, text: { enumerable: true } }); Body.mixIn = function (proto) { for (const name of Object.getOwnPropertyNames(Body.prototype)) { // istanbul ignore else: future proof if (!(name in proto)) { const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); Object.defineProperty(proto, name, desc); } } }; /** * Consume and convert an entire Body to a Buffer. * * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body * * @return Promise */ function consumeBody() { var _this4 = this; if (this[INTERNALS].disturbed) { return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); } this[INTERNALS].disturbed = true; if (this[INTERNALS].error) { return Body.Promise.reject(this[INTERNALS].error); } let body = this.body; // body is null if (body === null) { return Body.Promise.resolve(Buffer.alloc(0)); } // body is blob if (isBlob(body)) { body = body.stream(); } // body is buffer if (Buffer.isBuffer(body)) { return Body.Promise.resolve(body); } // istanbul ignore if: should never happen if (!(body instanceof Stream)) { return Body.Promise.resolve(Buffer.alloc(0)); } // body is stream // get ready to actually consume the body let accum = []; let accumBytes = 0; let abort = false; return new Body.Promise(function (resolve, reject) { let resTimeout; // allow timeout on slow response body if (_this4.timeout) { resTimeout = setTimeout(function () { abort = true; reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); }, _this4.timeout); } // handle stream errors body.on('error', function (err) { if (err.name === 'AbortError') { // if the request was aborted, reject with this Error abort = true; reject(err); } else { // other errors, such as incorrect content-encoding reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); } }); body.on('data', function (chunk) { if (abort || chunk === null) { return; } if (_this4.size && accumBytes + chunk.length > _this4.size) { abort = true; reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); return; } accumBytes += chunk.length; accum.push(chunk); }); body.on('end', function () { if (abort) { return; } clearTimeout(resTimeout); try { resolve(Buffer.concat(accum, accumBytes)); } catch (err) { // handle streams that have accumulated too much data (issue #414) reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); } }); }); } /** * Detect buffer encoding and convert to target encoding * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding * * @param Buffer buffer Incoming buffer * @param String encoding Target encoding * @return String */ function convertBody(buffer, headers) { if (typeof convert !== 'function') { throw new Error('The package `encoding` must be installed to use the textConverted() function'); } const ct = headers.get('content-type'); let charset = 'utf-8'; let res, str; // header if (ct) { res = /charset=([^;]*)/i.exec(ct); } // no charset in content type, peek at response body for at most 1024 bytes str = buffer.slice(0, 1024).toString(); // html5 if (!res && str) { res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined; this[MAP] = Object.create(null); if (init instanceof Headers) { const rawHeaders = init.raw(); const headerNames = Object.keys(rawHeaders); for (const headerName of headerNames) { for (const value of rawHeaders[headerName]) { this.append(headerName, value); } } return; } // We don't worry about converting prop to ByteString here as append() // will handle it. if (init == null) ; else if (typeof init === 'object') { const method = init[Symbol.iterator]; if (method != null) { if (typeof method !== 'function') { throw new TypeError('Header pairs must be iterable'); } // sequence> // Note: per spec we have to first exhaust the lists then process them const pairs = []; for (const pair of init) { if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { throw new TypeError('Each header pair must be iterable'); } pairs.push(Array.from(pair)); } for (const pair of pairs) { if (pair.length !== 2) { throw new TypeError('Each header pair must be a name/value tuple'); } this.append(pair[0], pair[1]); } } else { // record for (const key of Object.keys(init)) { const value = init[key]; this.append(key, value); } } } else { throw new TypeError('Provided initializer must be an object'); } } /** * Return combined header value given name * * @param String name Header name * @return Mixed */ get(name) { name = `${name}`; validateName(name); const key = find(this[MAP], name); if (key === undefined) { return null; } return this[MAP][key].join(', '); } /** * Iterate over all headers * * @param Function callback Executed for each item with parameters (value, name, thisArg) * @param Boolean thisArg `this` context for callback function * @return Void */ forEach(callback) { let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; let pairs = getHeaders(this); let i = 0; while (i < pairs.length) { var _pairs$i = pairs[i]; const name = _pairs$i[0], value = _pairs$i[1]; callback.call(thisArg, value, name, this); pairs = getHeaders(this); i++; } } /** * Overwrite header values given name * * @param String name Header name * @param String value Header value * @return Void */ set(name, value) { name = `${name}`; value = `${value}`; validateName(name); validateValue(value); const key = find(this[MAP], name); this[MAP][key !== undefined ? key : name] = [value]; } /** * Append a value onto existing header * * @param String name Header name * @param String value Header value * @return Void */ append(name, value) { name = `${name}`; value = `${value}`; validateName(name); validateValue(value); const key = find(this[MAP], name); if (key !== undefined) { this[MAP][key].push(value); } else { this[MAP][name] = [value]; } } /** * Check for header name existence * * @param String name Header name * @return Boolean */ has(name) { name = `${name}`; validateName(name); return find(this[MAP], name) !== undefined; } /** * Delete all header values given name * * @param String name Header name * @return Void */ delete(name) { name = `${name}`; validateName(name); const key = find(this[MAP], name); if (key !== undefined) { delete this[MAP][key]; } } /** * Return raw headers (non-spec api) * * @return Object */ raw() { return this[MAP]; } /** * Get an iterator on keys. * * @return Iterator */ keys() { return createHeadersIterator(this, 'key'); } /** * Get an iterator on values. * * @return Iterator */ values() { return createHeadersIterator(this, 'value'); } /** * Get an iterator on entries. * * This is the default iterator of the Headers object. * * @return Iterator */ [Symbol.iterator]() { return createHeadersIterator(this, 'key+value'); } } Headers.prototype.entries = Headers.prototype[Symbol.iterator]; Object.defineProperty(Headers.prototype, Symbol.toStringTag, { value: 'Headers', writable: false, enumerable: false, configurable: true }); Object.defineProperties(Headers.prototype, { get: { enumerable: true }, forEach: { enumerable: true }, set: { enumerable: true }, append: { enumerable: true }, has: { enumerable: true }, delete: { enumerable: true }, keys: { enumerable: true }, values: { enumerable: true }, entries: { enumerable: true } }); function getHeaders(headers) { let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; const keys = Object.keys(headers[MAP]).sort(); return keys.map(kind === 'key' ? function (k) { return k.toLowerCase(); } : kind === 'value' ? function (k) { return headers[MAP][k].join(', '); } : function (k) { return [k.toLowerCase(), headers[MAP][k].join(', ')]; }); } const INTERNAL = Symbol('internal'); function createHeadersIterator(target, kind) { const iterator = Object.create(HeadersIteratorPrototype); iterator[INTERNAL] = { target, kind, index: 0 }; return iterator; } const HeadersIteratorPrototype = Object.setPrototypeOf({ next() { // istanbul ignore if if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { throw new TypeError('Value of `this` is not a HeadersIterator'); } var _INTERNAL = this[INTERNAL]; const target = _INTERNAL.target, kind = _INTERNAL.kind, index = _INTERNAL.index; const values = getHeaders(target, kind); const len = values.length; if (index >= len) { return { value: undefined, done: true }; } this[INTERNAL].index = index + 1; return { value: values[index], done: false }; } }, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { value: 'HeadersIterator', writable: false, enumerable: false, configurable: true }); /** * Export the Headers object in a form that Node.js can consume. * * @param Headers headers * @return Object */ function exportNodeCompatibleHeaders(headers) { const obj = Object.assign({ __proto__: null }, headers[MAP]); // http.request() only supports string as Host header. This hack makes // specifying custom Host header possible. const hostHeaderKey = find(headers[MAP], 'Host'); if (hostHeaderKey !== undefined) { obj[hostHeaderKey] = obj[hostHeaderKey][0]; } return obj; } /** * Create a Headers object from an object of headers, ignoring those that do * not conform to HTTP grammar productions. * * @param Object obj Object of headers * @return Headers */ function createHeadersLenient(obj) { const headers = new Headers(); for (const name of Object.keys(obj)) { if (invalidTokenRegex.test(name)) { continue; } if (Array.isArray(obj[name])) { for (const val of obj[name]) { if (invalidHeaderCharRegex.test(val)) { continue; } if (headers[MAP][name] === undefined) { headers[MAP][name] = [val]; } else { headers[MAP][name].push(val); } } } else if (!invalidHeaderCharRegex.test(obj[name])) { headers[MAP][name] = [obj[name]]; } } return headers; } const INTERNALS$1 = Symbol('Response internals'); // fix an issue where "STATUS_CODES" aren't a named export for node <10 const STATUS_CODES = http.STATUS_CODES; /** * Response class * * @param Stream body Readable stream * @param Object opts Response options * @return Void */ class Response { constructor() { let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; Body.call(this, body, opts); const status = opts.status || 200; const headers = new Headers(opts.headers); if (body != null && !headers.has('Content-Type')) { const contentType = extractContentType(body); if (contentType) { headers.append('Content-Type', contentType); } } this[INTERNALS$1] = { url: opts.url, status, statusText: opts.statusText || STATUS_CODES[status], headers, counter: opts.counter }; } get url() { return this[INTERNALS$1].url || ''; } get status() { return this[INTERNALS$1].status; } /** * Convenience property representing if the request ended normally */ get ok() { return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; } get redirected() { return this[INTERNALS$1].counter > 0; } get statusText() { return this[INTERNALS$1].statusText; } get headers() { return this[INTERNALS$1].headers; } /** * Clone this response * * @return Response */ clone() { return new Response(clone(this), { url: this.url, status: this.status, statusText: this.statusText, headers: this.headers, ok: this.ok, redirected: this.redirected }); } } Body.mixIn(Response.prototype); Object.defineProperties(Response.prototype, { url: { enumerable: true }, status: { enumerable: true }, ok: { enumerable: true }, redirected: { enumerable: true }, statusText: { enumerable: true }, headers: { enumerable: true }, clone: { enumerable: true } }); Object.defineProperty(Response.prototype, Symbol.toStringTag, { value: 'Response', writable: false, enumerable: false, configurable: true }); const INTERNALS$2 = Symbol('Request internals'); // fix an issue where "format", "parse" aren't a named export for node <10 const parse_url = Url.parse; const format_url = Url.format; const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; /** * Check if a value is an instance of Request. * * @param Mixed input * @return Boolean */ function isRequest(input) { return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; } function isAbortSignal(signal) { const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); return !!(proto && proto.constructor.name === 'AbortSignal'); } /** * Request class * * @param Mixed input Url or Request instance * @param Object init Custom options * @return Void */ class Request { constructor(input) { let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; let parsedURL; // normalize input if (!isRequest(input)) { if (input && input.href) { // in order to support Node.js' Url objects; though WHATWG's URL objects // will fall into this branch also (since their `toString()` will return // `href` property anyway) parsedURL = parse_url(input.href); } else { // coerce input to a string before attempting to parse parsedURL = parse_url(`${input}`); } input = {}; } else { parsedURL = parse_url(input.url); } let method = init.method || input.method || 'GET'; method = method.toUpperCase(); if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { throw new TypeError('Request with GET/HEAD method cannot have body'); } let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; Body.call(this, inputBody, { timeout: init.timeout || input.timeout || 0, size: init.size || input.size || 0 }); const headers = new Headers(init.headers || input.headers || {}); if (inputBody != null && !headers.has('Content-Type')) { const contentType = extractContentType(inputBody); if (contentType) { headers.append('Content-Type', contentType); } } let signal = isRequest(input) ? input.signal : null; if ('signal' in init) signal = init.signal; if (signal != null && !isAbortSignal(signal)) { throw new TypeError('Expected signal to be an instanceof AbortSignal'); } this[INTERNALS$2] = { method, redirect: init.redirect || input.redirect || 'follow', headers, parsedURL, signal }; // node-fetch-only options this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; this.counter = init.counter || input.counter || 0; this.agent = init.agent || input.agent; } get method() { return this[INTERNALS$2].method; } get url() { return format_url(this[INTERNALS$2].parsedURL); } get headers() { return this[INTERNALS$2].headers; } get redirect() { return this[INTERNALS$2].redirect; } get signal() { return this[INTERNALS$2].signal; } /** * Clone this request * * @return Request */ clone() { return new Request(this); } } Body.mixIn(Request.prototype); Object.defineProperty(Request.prototype, Symbol.toStringTag, { value: 'Request', writable: false, enumerable: false, configurable: true }); Object.defineProperties(Request.prototype, { method: { enumerable: true }, url: { enumerable: true }, headers: { enumerable: true }, redirect: { enumerable: true }, clone: { enumerable: true }, signal: { enumerable: true } }); /** * Convert a Request to Node.js http request options. * * @param Request A Request instance * @return Object The options object to be passed to http.request */ function getNodeRequestOptions(request) { const parsedURL = request[INTERNALS$2].parsedURL; const headers = new Headers(request[INTERNALS$2].headers); // fetch step 1.3 if (!headers.has('Accept')) { headers.set('Accept', '*/*'); } // Basic fetch if (!parsedURL.protocol || !parsedURL.hostname) { throw new TypeError('Only absolute URLs are supported'); } if (!/^https?:$/.test(parsedURL.protocol)) { throw new TypeError('Only HTTP(S) protocols are supported'); } if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); } // HTTP-network-or-cache fetch steps 2.4-2.7 let contentLengthValue = null; if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { contentLengthValue = '0'; } if (request.body != null) { const totalBytes = getTotalBytes(request); if (typeof totalBytes === 'number') { contentLengthValue = String(totalBytes); } } if (contentLengthValue) { headers.set('Content-Length', contentLengthValue); } // HTTP-network-or-cache fetch step 2.11 if (!headers.has('User-Agent')) { headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); } // HTTP-network-or-cache fetch step 2.15 if (request.compress && !headers.has('Accept-Encoding')) { headers.set('Accept-Encoding', 'gzip,deflate'); } let agent = request.agent; if (typeof agent === 'function') { agent = agent(parsedURL); } if (!headers.has('Connection') && !agent) { headers.set('Connection', 'close'); } // HTTP-network fetch step 4.2 // chunked encoding is handled by Node.js return Object.assign({}, parsedURL, { method: request.method, headers: exportNodeCompatibleHeaders(headers), agent }); } /** * abort-error.js * * AbortError interface for cancelled requests */ /** * Create AbortError instance * * @param String message Error message for human * @return AbortError */ function AbortError(message) { Error.call(this, message); this.type = 'aborted'; this.message = message; // hide custom error implementation details from end-users Error.captureStackTrace(this, this.constructor); } AbortError.prototype = Object.create(Error.prototype); AbortError.prototype.constructor = AbortError; AbortError.prototype.name = 'AbortError'; // fix an issue where "PassThrough", "resolve" aren't a named export for node <10 const PassThrough$1 = Stream.PassThrough; const resolve_url = Url.resolve; /** * Fetch function * * @param Mixed url Absolute url or Request instance * @param Object opts Fetch options * @return Promise */ function fetch(url, opts) { // allow custom promise if (!fetch.Promise) { throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); } Body.Promise = fetch.Promise; // wrap http.request into fetch return new fetch.Promise(function (resolve, reject) { // build request object const request = new Request(url, opts); const options = getNodeRequestOptions(request); const send = (options.protocol === 'https:' ? https : http).request; const signal = request.signal; let response = null; const abort = function abort() { let error = new AbortError('The user aborted a request.'); reject(error); if (request.body && request.body instanceof Stream.Readable) { request.body.destroy(error); } if (!response || !response.body) return; response.body.emit('error', error); }; if (signal && signal.aborted) { abort(); return; } const abortAndFinalize = function abortAndFinalize() { abort(); finalize(); }; // send request const req = send(options); let reqTimeout; if (signal) { signal.addEventListener('abort', abortAndFinalize); } function finalize() { req.abort(); if (signal) signal.removeEventListener('abort', abortAndFinalize); clearTimeout(reqTimeout); } if (request.timeout) { req.once('socket', function (socket) { reqTimeout = setTimeout(function () { reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); finalize(); }, request.timeout); }); } req.on('error', function (err) { reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); finalize(); }); req.on('response', function (res) { clearTimeout(reqTimeout); const headers = createHeadersLenient(res.headers); // HTTP fetch step 5 if (fetch.isRedirect(res.statusCode)) { // HTTP fetch step 5.2 const location = headers.get('Location'); // HTTP fetch step 5.3 const locationURL = location === null ? null : resolve_url(request.url, location); // HTTP fetch step 5.5 switch (request.redirect) { case 'error': reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); finalize(); return; case 'manual': // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. if (locationURL !== null) { // handle corrupted header try { headers.set('Location', locationURL); } catch (err) { // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request reject(err); } } break; case 'follow': // HTTP-redirect fetch step 2 if (locationURL === null) { break; } // HTTP-redirect fetch step 5 if (request.counter >= request.follow) { reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); finalize(); return; } // HTTP-redirect fetch step 6 (counter increment) // Create a new Request object. const requestOpts = { headers: new Headers(request.headers), follow: request.follow, counter: request.counter + 1, agent: request.agent, compress: request.compress, method: request.method, body: request.body, signal: request.signal, timeout: request.timeout, size: request.size }; // HTTP-redirect fetch step 9 if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); finalize(); return; } // HTTP-redirect fetch step 11 if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { requestOpts.method = 'GET'; requestOpts.body = undefined; requestOpts.headers.delete('content-length'); } // HTTP-redirect fetch step 15 resolve(fetch(new Request(locationURL, requestOpts))); finalize(); return; } } // prepare response res.once('end', function () { if (signal) signal.removeEventListener('abort', abortAndFinalize); }); let body = res.pipe(new PassThrough$1()); const response_options = { url: request.url, status: res.statusCode, statusText: res.statusMessage, headers: headers, size: request.size, timeout: request.timeout, counter: request.counter }; // HTTP-network fetch step 12.1.1.3 const codings = headers.get('Content-Encoding'); // HTTP-network fetch step 12.1.1.4: handle content codings // in following scenarios we ignore compression support // 1. compression support is disabled // 2. HEAD request // 3. no Content-Encoding header // 4. no content response (204) // 5. content not modified response (304) if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { response = new Response(body, response_options); resolve(response); return; } // For Node v6+ // Be less strict when decoding compressed responses, since sometimes // servers send slightly invalid responses that are still accepted // by common browsers. // Always using Z_SYNC_FLUSH is what cURL does. const zlibOptions = { flush: zlib.Z_SYNC_FLUSH, finishFlush: zlib.Z_SYNC_FLUSH }; // for gzip if (codings == 'gzip' || codings == 'x-gzip') { body = body.pipe(zlib.createGunzip(zlibOptions)); response = new Response(body, response_options); resolve(response); return; } // for deflate if (codings == 'deflate' || codings == 'x-deflate') { // handle the infamous raw deflate response from old servers // a hack for old IIS and Apache servers const raw = res.pipe(new PassThrough$1()); raw.once('data', function (chunk) { // see http://stackoverflow.com/questions/37519828 if ((chunk[0] & 0x0F) === 0x08) { body = body.pipe(zlib.createInflate()); } else { body = body.pipe(zlib.createInflateRaw()); } response = new Response(body, response_options); resolve(response); }); return; } // for br if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { body = body.pipe(zlib.createBrotliDecompress()); response = new Response(body, response_options); resolve(response); return; } // otherwise, use response as-is response = new Response(body, response_options); resolve(response); }); writeToStream(req, request); }); } /** * Redirect code matching * * @param Number code Status code * @return Boolean */ fetch.isRedirect = function (code) { return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; }; // expose Promise fetch.Promise = global.Promise; module.exports = exports = fetch; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.default = exports; exports.Headers = Headers; exports.Request = Request; exports.Response = Response; exports.FetchError = FetchError; /***/ }), /***/ 2072: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = paginationMethodsPlugin function paginationMethodsPlugin (octokit) { octokit.getFirstPage = __nccwpck_require__(9555).bind(null, octokit) octokit.getLastPage = __nccwpck_require__(2203).bind(null, octokit) octokit.getNextPage = __nccwpck_require__(6655).bind(null, octokit) octokit.getPreviousPage = __nccwpck_require__(3032).bind(null, octokit) octokit.hasFirstPage = __nccwpck_require__(9631) octokit.hasLastPage = __nccwpck_require__(4286) octokit.hasNextPage = __nccwpck_require__(500) octokit.hasPreviousPage = __nccwpck_require__(5996) } /***/ }), /***/ 191: /***/ ((module) => { module.exports = deprecate const loggedMessages = {} function deprecate (message) { if (loggedMessages[message]) { return } console.warn(`DEPRECATED (@octokit/rest): ${message}`) loggedMessages[message] = 1 } /***/ }), /***/ 9555: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = getFirstPage const getPage = __nccwpck_require__(8604) function getFirstPage (octokit, link, headers) { return getPage(octokit, link, 'first', headers) } /***/ }), /***/ 2203: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = getLastPage const getPage = __nccwpck_require__(8604) function getLastPage (octokit, link, headers) { return getPage(octokit, link, 'last', headers) } /***/ }), /***/ 6655: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = getNextPage const getPage = __nccwpck_require__(8604) function getNextPage (octokit, link, headers) { return getPage(octokit, link, 'next', headers) } /***/ }), /***/ 7889: /***/ ((module) => { module.exports = getPageLinks function getPageLinks (link) { link = link.link || link.headers.link || '' const links = {} // link format: // '; rel="next", ; rel="last"' link.replace(/<([^>]*)>;\s*rel="([\w]*)"/g, (m, uri, type) => { links[type] = uri }) return links } /***/ }), /***/ 8604: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = getPage const deprecate = __nccwpck_require__(191) const getPageLinks = __nccwpck_require__(7889) const HttpError = __nccwpck_require__(6058) function getPage (octokit, link, which, headers) { deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) const url = getPageLinks(link)[which] if (!url) { const urlError = new HttpError(`No ${which} page found`, 404) return Promise.reject(urlError) } const requestOptions = { url, headers: applyAcceptHeader(link, headers) } const promise = octokit.request(requestOptions) return promise } function applyAcceptHeader (res, headers) { const previous = res.headers && res.headers['x-github-media-type'] if (!previous || (headers && headers.accept)) { return headers } headers = headers || {} headers.accept = 'application/vnd.' + previous .replace('; param=', '.') .replace('; format=', '+') return headers } /***/ }), /***/ 3032: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = getPreviousPage const getPage = __nccwpck_require__(8604) function getPreviousPage (octokit, link, headers) { return getPage(octokit, link, 'prev', headers) } /***/ }), /***/ 9631: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = hasFirstPage const deprecate = __nccwpck_require__(191) const getPageLinks = __nccwpck_require__(7889) function hasFirstPage (link) { deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) return getPageLinks(link).first } /***/ }), /***/ 4286: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = hasLastPage const deprecate = __nccwpck_require__(191) const getPageLinks = __nccwpck_require__(7889) function hasLastPage (link) { deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) return getPageLinks(link).last } /***/ }), /***/ 500: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = hasNextPage const deprecate = __nccwpck_require__(191) const getPageLinks = __nccwpck_require__(7889) function hasNextPage (link) { deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) return getPageLinks(link).next } /***/ }), /***/ 5996: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = hasPreviousPage const deprecate = __nccwpck_require__(191) const getPageLinks = __nccwpck_require__(7889) function hasPreviousPage (link) { deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) return getPageLinks(link).prev } /***/ }), /***/ 6058: /***/ ((module) => { module.exports = class HttpError extends Error { constructor (message, code, headers) { super(message) // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor) } this.name = 'HttpError' this.code = code this.headers = headers } } /***/ }), /***/ 1223: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var wrappy = __nccwpck_require__(2940) module.exports = wrappy(once) module.exports.strict = wrappy(onceStrict) once.proto = once(function () { Object.defineProperty(Function.prototype, 'once', { value: function () { return once(this) }, configurable: true }) Object.defineProperty(Function.prototype, 'onceStrict', { value: function () { return onceStrict(this) }, configurable: true }) }) function once (fn) { var f = function () { if (f.called) return f.value f.called = true return f.value = fn.apply(this, arguments) } f.called = false return f } function onceStrict (fn) { var f = function () { if (f.called) throw new Error(f.onceError) f.called = true return f.value = fn.apply(this, arguments) } var name = fn.name || 'Function wrapped with `once`' f.onceError = name + " shouldn't be called more than once" f.called = false return f } /***/ }), /***/ 4824: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const os = __nccwpck_require__(2087); const macosRelease = __nccwpck_require__(7493); const winRelease = __nccwpck_require__(3515); const osName = (platform, release) => { if (!platform && release) { throw new Error('You can\'t specify a `release` without specifying `platform`'); } platform = platform || os.platform(); let id; if (platform === 'darwin') { if (!release && os.platform() === 'darwin') { release = os.release(); } const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; id = release ? macosRelease(release).name : ''; return prefix + (id ? ' ' + id : ''); } if (platform === 'linux') { if (!release && os.platform() === 'linux') { release = os.release(); } id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; return 'Linux' + (id ? ' ' + id : ''); } if (platform === 'win32') { if (!release && os.platform() === 'win32') { release = os.release(); } id = release ? winRelease(release) : ''; return 'Windows' + (id ? ' ' + id : ''); } return platform; }; module.exports = osName; /***/ }), /***/ 1330: /***/ ((module) => { "use strict"; module.exports = (promise, onFinally) => { onFinally = onFinally || (() => {}); return promise.then( val => new Promise(resolve => { resolve(onFinally()); }).then(() => val), err => new Promise(resolve => { resolve(onFinally()); }).then(() => { throw err; }) ); }; /***/ }), /***/ 8341: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var once = __nccwpck_require__(1223) var eos = __nccwpck_require__(1205) var fs = __nccwpck_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes var noop = function () {} var ancient = /^v?\.0/.test(process.version) var isFn = function (fn) { return typeof fn === 'function' } var isFS = function (stream) { if (!ancient) return false // newer node version do not need to care about fs is a special way if (!fs) return false // browser return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) } var isRequest = function (stream) { return stream.setHeader && isFn(stream.abort) } var destroyer = function (stream, reading, writing, callback) { callback = once(callback) var closed = false stream.on('close', function () { closed = true }) eos(stream, {readable: reading, writable: writing}, function (err) { if (err) return callback(err) closed = true callback() }) var destroyed = false return function (err) { if (closed) return if (destroyed) return destroyed = true if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want if (isFn(stream.destroy)) return stream.destroy() callback(err || new Error('stream was destroyed')) } } var call = function (fn) { fn() } var pipe = function (from, to) { return from.pipe(to) } var pump = function () { var streams = Array.prototype.slice.call(arguments) var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop if (Array.isArray(streams[0])) streams = streams[0] if (streams.length < 2) throw new Error('pump requires two streams per minimum') var error var destroys = streams.map(function (stream, i) { var reading = i < streams.length - 1 var writing = i > 0 return destroyer(stream, reading, writing, function (err) { if (!error) error = err if (err) destroys.forEach(call) if (reading) return destroys.forEach(call) callback(error) }) }) return streams.reduce(pipe) } module.exports = pump /***/ }), /***/ 4931: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { // Note: since nyc uses this module to output coverage, any lines // that are in the direct sync flow of nyc's outputCoverage are // ignored, since we can never get coverage for them. var assert = __nccwpck_require__(2357) var signals = __nccwpck_require__(3710) var isWin = /^win/i.test(process.platform) var EE = __nccwpck_require__(8614) /* istanbul ignore if */ if (typeof EE !== 'function') { EE = EE.EventEmitter } var emitter if (process.__signal_exit_emitter__) { emitter = process.__signal_exit_emitter__ } else { emitter = process.__signal_exit_emitter__ = new EE() emitter.count = 0 emitter.emitted = {} } // Because this emitter is a global, we have to check to see if a // previous version of this library failed to enable infinite listeners. // I know what you're about to say. But literally everything about // signal-exit is a compromise with evil. Get used to it. if (!emitter.infinite) { emitter.setMaxListeners(Infinity) emitter.infinite = true } module.exports = function (cb, opts) { assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') if (loaded === false) { load() } var ev = 'exit' if (opts && opts.alwaysLast) { ev = 'afterexit' } var remove = function () { emitter.removeListener(ev, cb) if (emitter.listeners('exit').length === 0 && emitter.listeners('afterexit').length === 0) { unload() } } emitter.on(ev, cb) return remove } module.exports.unload = unload function unload () { if (!loaded) { return } loaded = false signals.forEach(function (sig) { try { process.removeListener(sig, sigListeners[sig]) } catch (er) {} }) process.emit = originalProcessEmit process.reallyExit = originalProcessReallyExit emitter.count -= 1 } function emit (event, code, signal) { if (emitter.emitted[event]) { return } emitter.emitted[event] = true emitter.emit(event, code, signal) } // { : , ... } var sigListeners = {} signals.forEach(function (sig) { sigListeners[sig] = function listener () { // If there are no other listeners, an exit is coming! // Simplest way: remove us and then re-send the signal. // We know that this will kill the process, so we can // safely emit now. var listeners = process.listeners(sig) if (listeners.length === emitter.count) { unload() emit('exit', null, sig) /* istanbul ignore next */ emit('afterexit', null, sig) /* istanbul ignore next */ if (isWin && sig === 'SIGHUP') { // "SIGHUP" throws an `ENOSYS` error on Windows, // so use a supported signal instead sig = 'SIGINT' } process.kill(process.pid, sig) } } }) module.exports.signals = function () { return signals } module.exports.load = load var loaded = false function load () { if (loaded) { return } loaded = true // This is the number of onSignalExit's that are in play. // It's important so that we can count the correct number of // listeners on signals, and don't wait for the other one to // handle it instead of us. emitter.count += 1 signals = signals.filter(function (sig) { try { process.on(sig, sigListeners[sig]) return true } catch (er) { return false } }) process.emit = processEmit process.reallyExit = processReallyExit } var originalProcessReallyExit = process.reallyExit function processReallyExit (code) { process.exitCode = code || 0 emit('exit', process.exitCode, null) /* istanbul ignore next */ emit('afterexit', process.exitCode, null) /* istanbul ignore next */ originalProcessReallyExit.call(process, process.exitCode) } var originalProcessEmit = process.emit function processEmit (ev, arg) { if (ev === 'exit') { if (arg !== undefined) { process.exitCode = arg } var ret = originalProcessEmit.apply(this, arguments) emit('exit', process.exitCode, null) /* istanbul ignore next */ emit('afterexit', process.exitCode, null) return ret } else { return originalProcessEmit.apply(this, arguments) } } /***/ }), /***/ 3710: /***/ ((module) => { // This is not the set of all possible signals. // // It IS, however, the set of all signals that trigger // an exit on either Linux or BSD systems. Linux is a // superset of the signal names supported on BSD, and // the unknown signals just fail to register, so we can // catch that easily enough. // // Don't bother with SIGKILL. It's uncatchable, which // means that we can't fire any callbacks anyway. // // If a user does happen to register a handler on a non- // fatal signal like SIGWINCH or something, and then // exit, it'll end up firing `process.emit('exit')`, so // the handler will be fired anyway. // // SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised // artificially, inherently leave the process in a // state from which it is not safe to try and enter JS // listeners. module.exports = [ 'SIGABRT', 'SIGALRM', 'SIGHUP', 'SIGINT', 'SIGTERM' ] if (process.platform !== 'win32') { module.exports.push( 'SIGVTALRM', 'SIGXCPU', 'SIGXFSZ', 'SIGUSR2', 'SIGTRAP', 'SIGSYS', 'SIGQUIT', 'SIGIOT' // should detect profiler and enable/disable accordingly. // see #21 // 'SIGPROF' ) } if (process.platform === 'linux') { module.exports.push( 'SIGIO', 'SIGPOLL', 'SIGPWR', 'SIGSTKFLT', 'SIGUNUSED' ) } /***/ }), /***/ 5515: /***/ ((module) => { "use strict"; module.exports = function (x) { var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); if (x[x.length - 1] === lf) { x = x.slice(0, x.length - 1); } if (x[x.length - 1] === cr) { x = x.slice(0, x.length - 1); } return x; }; /***/ }), /***/ 4294: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(4219); /***/ }), /***/ 4219: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; var net = __nccwpck_require__(1631); var tls = __nccwpck_require__(4016); var http = __nccwpck_require__(8605); var https = __nccwpck_require__(7211); var events = __nccwpck_require__(8614); var assert = __nccwpck_require__(2357); var util = __nccwpck_require__(1669); exports.httpOverHttp = httpOverHttp; exports.httpsOverHttp = httpsOverHttp; exports.httpOverHttps = httpOverHttps; exports.httpsOverHttps = httpsOverHttps; function httpOverHttp(options) { var agent = new TunnelingAgent(options); agent.request = http.request; return agent; } function httpsOverHttp(options) { var agent = new TunnelingAgent(options); agent.request = http.request; agent.createSocket = createSecureSocket; agent.defaultPort = 443; return agent; } function httpOverHttps(options) { var agent = new TunnelingAgent(options); agent.request = https.request; return agent; } function httpsOverHttps(options) { var agent = new TunnelingAgent(options); agent.request = https.request; agent.createSocket = createSecureSocket; agent.defaultPort = 443; return agent; } function TunnelingAgent(options) { var self = this; self.options = options || {}; self.proxyOptions = self.options.proxy || {}; self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; self.requests = []; self.sockets = []; self.on('free', function onFree(socket, host, port, localAddress) { var options = toOptions(host, port, localAddress); for (var i = 0, len = self.requests.length; i < len; ++i) { var pending = self.requests[i]; if (pending.host === options.host && pending.port === options.port) { // Detect the request to connect same origin server, // reuse the connection. self.requests.splice(i, 1); pending.request.onSocket(socket); return; } } socket.destroy(); self.removeSocket(socket); }); } util.inherits(TunnelingAgent, events.EventEmitter); TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { var self = this; var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); if (self.sockets.length >= this.maxSockets) { // We are over limit so we'll add it to the queue. self.requests.push(options); return; } // If we are under maxSockets create a new one. self.createSocket(options, function(socket) { socket.on('free', onFree); socket.on('close', onCloseOrRemove); socket.on('agentRemove', onCloseOrRemove); req.onSocket(socket); function onFree() { self.emit('free', socket, options); } function onCloseOrRemove(err) { self.removeSocket(socket); socket.removeListener('free', onFree); socket.removeListener('close', onCloseOrRemove); socket.removeListener('agentRemove', onCloseOrRemove); } }); }; TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { var self = this; var placeholder = {}; self.sockets.push(placeholder); var connectOptions = mergeOptions({}, self.proxyOptions, { method: 'CONNECT', path: options.host + ':' + options.port, agent: false, headers: { host: options.host + ':' + options.port } }); if (options.localAddress) { connectOptions.localAddress = options.localAddress; } if (connectOptions.proxyAuth) { connectOptions.headers = connectOptions.headers || {}; connectOptions.headers['Proxy-Authorization'] = 'Basic ' + new Buffer(connectOptions.proxyAuth).toString('base64'); } debug('making CONNECT request'); var connectReq = self.request(connectOptions); connectReq.useChunkedEncodingByDefault = false; // for v0.6 connectReq.once('response', onResponse); // for v0.6 connectReq.once('upgrade', onUpgrade); // for v0.6 connectReq.once('connect', onConnect); // for v0.7 or later connectReq.once('error', onError); connectReq.end(); function onResponse(res) { // Very hacky. This is necessary to avoid http-parser leaks. res.upgrade = true; } function onUpgrade(res, socket, head) { // Hacky. process.nextTick(function() { onConnect(res, socket, head); }); } function onConnect(res, socket, head) { connectReq.removeAllListeners(); socket.removeAllListeners(); if (res.statusCode !== 200) { debug('tunneling socket could not be established, statusCode=%d', res.statusCode); socket.destroy(); var error = new Error('tunneling socket could not be established, ' + 'statusCode=' + res.statusCode); error.code = 'ECONNRESET'; options.request.emit('error', error); self.removeSocket(placeholder); return; } if (head.length > 0) { debug('got illegal response body from proxy'); socket.destroy(); var error = new Error('got illegal response body from proxy'); error.code = 'ECONNRESET'; options.request.emit('error', error); self.removeSocket(placeholder); return; } debug('tunneling connection has established'); self.sockets[self.sockets.indexOf(placeholder)] = socket; return cb(socket); } function onError(cause) { connectReq.removeAllListeners(); debug('tunneling socket could not be established, cause=%s\n', cause.message, cause.stack); var error = new Error('tunneling socket could not be established, ' + 'cause=' + cause.message); error.code = 'ECONNRESET'; options.request.emit('error', error); self.removeSocket(placeholder); } }; TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { var pos = this.sockets.indexOf(socket) if (pos === -1) { return; } this.sockets.splice(pos, 1); var pending = this.requests.shift(); if (pending) { // If we have pending requests and a socket gets closed a new one // needs to be created to take over in the pool for the one that closed. this.createSocket(pending, function(socket) { pending.request.onSocket(socket); }); } }; function createSecureSocket(options, cb) { var self = this; TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { var hostHeader = options.request.getHeader('host'); var tlsOptions = mergeOptions({}, self.options, { socket: socket, servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host }); // 0 is dummy port for v0.6 var secureSocket = tls.connect(0, tlsOptions); self.sockets[self.sockets.indexOf(socket)] = secureSocket; cb(secureSocket); }); } function toOptions(host, port, localAddress) { if (typeof host === 'string') { // since v0.10 return { host: host, port: port, localAddress: localAddress }; } return host; // for v0.11 or later } function mergeOptions(target) { for (var i = 1, len = arguments.length; i < len; ++i) { var overrides = arguments[i]; if (typeof overrides === 'object') { var keys = Object.keys(overrides); for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { var k = keys[j]; if (overrides[k] !== undefined) { target[k] = overrides[k]; } } } } return target; } var debug; if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { debug = function() { var args = Array.prototype.slice.call(arguments); if (typeof args[0] === 'string') { args[0] = 'TUNNEL: ' + args[0]; } else { args.unshift('TUNNEL:'); } console.error.apply(console, args); } } else { debug = function() {}; } exports.debug = debug; // for test /***/ }), /***/ 5030: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function getUserAgent() { if (typeof navigator === "object" && "userAgent" in navigator) { return navigator.userAgent; } if (typeof process === "object" && "version" in process) { return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; } return ""; } exports.getUserAgent = getUserAgent; //# sourceMappingURL=index.js.map /***/ }), /***/ 1463: /***/ ((__unused_webpack_module, exports) => { "use strict"; exports.fromCallback = function (fn) { return Object.defineProperty(function (...args) { if (typeof args[args.length - 1] === 'function') fn.apply(this, args) else { return new Promise((resolve, reject) => { fn.call( this, ...args, (err, res) => (err != null) ? reject(err) : resolve(res) ) }) } }, 'name', { value: fn.name }) } exports.fromPromise = function (fn) { return Object.defineProperty(function (...args) { const cb = args[args.length - 1] if (typeof cb !== 'function') return fn.apply(this, args) else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) }, 'name', { value: fn.name }) } /***/ }), /***/ 3515: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const os = __nccwpck_require__(2087); const execa = __nccwpck_require__(4780); // Reference: https://www.gaijin.at/en/lstwinver.php const names = new Map([ ['10.0', '10'], ['6.3', '8.1'], ['6.2', '8'], ['6.1', '7'], ['6.0', 'Vista'], ['5.2', 'Server 2003'], ['5.1', 'XP'], ['5.0', '2000'], ['4.9', 'ME'], ['4.1', '98'], ['4.0', '95'] ]); const windowsRelease = release => { const version = /\d+\.\d/.exec(release || os.release()); if (release && !version) { throw new Error('`release` argument doesn\'t match `n.n`'); } const ver = (version || [])[0]; // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime. // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx // If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead. // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name. if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { let stdout; try { stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; } catch (_) { stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || ''; } const year = (stdout.match(/2008|2012|2016|2019/) || [])[0]; if (year) { return `Server ${year}`; } } return names.get(ver); }; module.exports = windowsRelease; /***/ }), /***/ 6868: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const cp = __nccwpck_require__(3129); const parse = __nccwpck_require__(6876); const enoent = __nccwpck_require__(8625); function spawn(command, args, options) { // Parse the arguments const parsed = parse(command, args, options); // Spawn the child process const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); // Hook into child process "exit" event to emit an error if the command // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 enoent.hookChildProcess(spawned, parsed); return spawned; } function spawnSync(command, args, options) { // Parse the arguments const parsed = parse(command, args, options); // Spawn the child process const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); return result; } module.exports = spawn; module.exports.spawn = spawn; module.exports.sync = spawnSync; module.exports._parse = parse; module.exports._enoent = enoent; /***/ }), /***/ 8625: /***/ ((module) => { "use strict"; const isWin = process.platform === 'win32'; function notFoundError(original, syscall) { return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { code: 'ENOENT', errno: 'ENOENT', syscall: `${syscall} ${original.command}`, path: original.command, spawnargs: original.args, }); } function hookChildProcess(cp, parsed) { if (!isWin) { return; } const originalEmit = cp.emit; cp.emit = function (name, arg1) { // If emitting "exit" event and exit code is 1, we need to check if // the command exists and emit an "error" instead // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 if (name === 'exit') { const err = verifyENOENT(arg1, parsed, 'spawn'); if (err) { return originalEmit.call(cp, 'error', err); } } return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params }; } function verifyENOENT(status, parsed) { if (isWin && status === 1 && !parsed.file) { return notFoundError(parsed.original, 'spawn'); } return null; } function verifyENOENTSync(status, parsed) { if (isWin && status === 1 && !parsed.file) { return notFoundError(parsed.original, 'spawnSync'); } return null; } module.exports = { hookChildProcess, verifyENOENT, verifyENOENTSync, notFoundError, }; /***/ }), /***/ 6876: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(5622); const niceTry = __nccwpck_require__(8560); const resolveCommand = __nccwpck_require__(8741); const escape = __nccwpck_require__(4300); const readShebang = __nccwpck_require__(8536); const semver = __nccwpck_require__(9317); const isWin = process.platform === 'win32'; const isExecutableRegExp = /\.(?:com|exe)$/i; const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; // `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; function detectShebang(parsed) { parsed.file = resolveCommand(parsed); const shebang = parsed.file && readShebang(parsed.file); if (shebang) { parsed.args.unshift(parsed.file); parsed.command = shebang; return resolveCommand(parsed); } return parsed.file; } function parseNonShell(parsed) { if (!isWin) { return parsed; } // Detect & add support for shebangs const commandFile = detectShebang(parsed); // We don't need a shell if the command filename is an executable const needsShell = !isExecutableRegExp.test(commandFile); // If a shell is required, use cmd.exe and take care of escaping everything correctly // Note that `forceShell` is an hidden option used only in tests if (parsed.options.forceShell || needsShell) { // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, // we need to double escape them const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) // This is necessary otherwise it will always fail with ENOENT in those cases parsed.command = path.normalize(parsed.command); // Escape command & arguments parsed.command = escape.command(parsed.command); parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); const shellCommand = [parsed.command].concat(parsed.args).join(' '); parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; parsed.command = process.env.comspec || 'cmd.exe'; parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped } return parsed; } function parseShell(parsed) { // If node supports the shell option, there's no need to mimic its behavior if (supportsShellOption) { return parsed; } // Mimic node shell option // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 const shellCommand = [parsed.command].concat(parsed.args).join(' '); if (isWin) { parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped } else { if (typeof parsed.options.shell === 'string') { parsed.command = parsed.options.shell; } else if (process.platform === 'android') { parsed.command = '/system/bin/sh'; } else { parsed.command = '/bin/sh'; } parsed.args = ['-c', shellCommand]; } return parsed; } function parse(command, args, options) { // Normalize arguments, similar to nodejs if (args && !Array.isArray(args)) { options = args; args = null; } args = args ? args.slice(0) : []; // Clone array to avoid changing the original options = Object.assign({}, options); // Clone object to avoid changing the original // Build our parsed object const parsed = { command, args, options, file: undefined, original: { command, args, }, }; // Delegate further parsing to shell or non-shell return options.shell ? parseShell(parsed) : parseNonShell(parsed); } module.exports = parse; /***/ }), /***/ 4300: /***/ ((module) => { "use strict"; // See http://www.robvanderwoude.com/escapechars.php const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; function escapeCommand(arg) { // Escape meta chars arg = arg.replace(metaCharsRegExp, '^$1'); return arg; } function escapeArgument(arg, doubleEscapeMetaChars) { // Convert to string arg = `${arg}`; // Algorithm below is based on https://qntm.org/cmd // Sequence of backslashes followed by a double quote: // double up all the backslashes and escape the double quote arg = arg.replace(/(\\*)"/g, '$1$1\\"'); // Sequence of backslashes followed by the end of the string // (which will become a double quote later): // double up all the backslashes arg = arg.replace(/(\\*)$/, '$1$1'); // All other backslashes occur literally // Quote the whole thing: arg = `"${arg}"`; // Escape meta chars arg = arg.replace(metaCharsRegExp, '^$1'); // Double escape meta chars if necessary if (doubleEscapeMetaChars) { arg = arg.replace(metaCharsRegExp, '^$1'); } return arg; } module.exports.command = escapeCommand; module.exports.argument = escapeArgument; /***/ }), /***/ 8536: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fs = __nccwpck_require__(5747); const shebangCommand = __nccwpck_require__(2116); function readShebang(command) { // Read the first 150 bytes from the file const size = 150; let buffer; if (Buffer.alloc) { // Node.js v4.5+ / v5.10+ buffer = Buffer.alloc(size); } else { // Old Node.js API buffer = new Buffer(size); buffer.fill(0); // zero-fill } let fd; try { fd = fs.openSync(command, 'r'); fs.readSync(fd, buffer, 0, size, 0); fs.closeSync(fd); } catch (e) { /* Empty */ } // Attempt to extract shebang (null is returned if not a shebang) return shebangCommand(buffer.toString()); } module.exports = readShebang; /***/ }), /***/ 8741: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(5622); const which = __nccwpck_require__(3411); const pathKey = __nccwpck_require__(7299)(); function resolveCommandAttempt(parsed, withoutPathExt) { const cwd = process.cwd(); const hasCustomCwd = parsed.options.cwd != null; // If a custom `cwd` was specified, we need to change the process cwd // because `which` will do stat calls but does not support a custom cwd if (hasCustomCwd) { try { process.chdir(parsed.options.cwd); } catch (err) { /* Empty */ } } let resolved; try { resolved = which.sync(parsed.command, { path: (parsed.options.env || process.env)[pathKey], pathExt: withoutPathExt ? path.delimiter : undefined, }); } catch (e) { /* Empty */ } finally { process.chdir(cwd); } // If we successfully resolved, ensure that an absolute path is returned // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it if (resolved) { resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); } return resolved; } function resolveCommand(parsed) { return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); } module.exports = resolveCommand; /***/ }), /***/ 4780: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(5622); const childProcess = __nccwpck_require__(3129); const crossSpawn = __nccwpck_require__(6868); const stripEof = __nccwpck_require__(5515); const npmRunPath = __nccwpck_require__(2509); const isStream = __nccwpck_require__(2597); const _getStream = __nccwpck_require__(2560); const pFinally = __nccwpck_require__(1330); const onExit = __nccwpck_require__(4931); const errname = __nccwpck_require__(2160); const stdio = __nccwpck_require__(7023); const TEN_MEGABYTES = 1000 * 1000 * 10; function handleArgs(cmd, args, opts) { let parsed; opts = Object.assign({ extendEnv: true, env: {} }, opts); if (opts.extendEnv) { opts.env = Object.assign({}, process.env, opts.env); } if (opts.__winShell === true) { delete opts.__winShell; parsed = { command: cmd, args, options: opts, file: cmd, original: { cmd, args } }; } else { parsed = crossSpawn._parse(cmd, args, opts); } opts = Object.assign({ maxBuffer: TEN_MEGABYTES, buffer: true, stripEof: true, preferLocal: true, localDir: parsed.options.cwd || process.cwd(), encoding: 'utf8', reject: true, cleanup: true }, parsed.options); opts.stdio = stdio(opts); if (opts.preferLocal) { opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); } if (opts.detached) { // #115 opts.cleanup = false; } if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { // #116 parsed.args.unshift('/q'); } return { cmd: parsed.command, args: parsed.args, opts, parsed }; } function handleInput(spawned, input) { if (input === null || input === undefined) { return; } if (isStream(input)) { input.pipe(spawned.stdin); } else { spawned.stdin.end(input); } } function handleOutput(opts, val) { if (val && opts.stripEof) { val = stripEof(val); } return val; } function handleShell(fn, cmd, opts) { let file = '/bin/sh'; let args = ['-c', cmd]; opts = Object.assign({}, opts); if (process.platform === 'win32') { opts.__winShell = true; file = process.env.comspec || 'cmd.exe'; args = ['/s', '/c', `"${cmd}"`]; opts.windowsVerbatimArguments = true; } if (opts.shell) { file = opts.shell; delete opts.shell; } return fn(file, args, opts); } function getStream(process, stream, {encoding, buffer, maxBuffer}) { if (!process[stream]) { return null; } let ret; if (!buffer) { // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 ret = new Promise((resolve, reject) => { process[stream] .once('end', resolve) .once('error', reject); }); } else if (encoding) { ret = _getStream(process[stream], { encoding, maxBuffer }); } else { ret = _getStream.buffer(process[stream], {maxBuffer}); } return ret.catch(err => { err.stream = stream; err.message = `${stream} ${err.message}`; throw err; }); } function makeError(result, options) { const {stdout, stderr} = result; let err = result.error; const {code, signal} = result; const {parsed, joinedCmd} = options; const timedOut = options.timedOut || false; if (!err) { let output = ''; if (Array.isArray(parsed.opts.stdio)) { if (parsed.opts.stdio[2] !== 'inherit') { output += output.length > 0 ? stderr : `\n${stderr}`; } if (parsed.opts.stdio[1] !== 'inherit') { output += `\n${stdout}`; } } else if (parsed.opts.stdio !== 'inherit') { output = `\n${stderr}${stdout}`; } err = new Error(`Command failed: ${joinedCmd}${output}`); err.code = code < 0 ? errname(code) : code; } err.stdout = stdout; err.stderr = stderr; err.failed = true; err.signal = signal || null; err.cmd = joinedCmd; err.timedOut = timedOut; return err; } function joinCmd(cmd, args) { let joinedCmd = cmd; if (Array.isArray(args) && args.length > 0) { joinedCmd += ' ' + args.join(' '); } return joinedCmd; } module.exports = (cmd, args, opts) => { const parsed = handleArgs(cmd, args, opts); const {encoding, buffer, maxBuffer} = parsed.opts; const joinedCmd = joinCmd(cmd, args); let spawned; try { spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); } catch (err) { return Promise.reject(err); } let removeExitHandler; if (parsed.opts.cleanup) { removeExitHandler = onExit(() => { spawned.kill(); }); } let timeoutId = null; let timedOut = false; const cleanup = () => { if (timeoutId) { clearTimeout(timeoutId); timeoutId = null; } if (removeExitHandler) { removeExitHandler(); } }; if (parsed.opts.timeout > 0) { timeoutId = setTimeout(() => { timeoutId = null; timedOut = true; spawned.kill(parsed.opts.killSignal); }, parsed.opts.timeout); } const processDone = new Promise(resolve => { spawned.on('exit', (code, signal) => { cleanup(); resolve({code, signal}); }); spawned.on('error', err => { cleanup(); resolve({error: err}); }); if (spawned.stdin) { spawned.stdin.on('error', err => { cleanup(); resolve({error: err}); }); } }); function destroy() { if (spawned.stdout) { spawned.stdout.destroy(); } if (spawned.stderr) { spawned.stderr.destroy(); } } const handlePromise = () => pFinally(Promise.all([ processDone, getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) ]).then(arr => { const result = arr[0]; result.stdout = arr[1]; result.stderr = arr[2]; if (result.error || result.code !== 0 || result.signal !== null) { const err = makeError(result, { joinedCmd, parsed, timedOut }); // TODO: missing some timeout logic for killed // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 // err.killed = spawned.killed || killed; err.killed = err.killed || spawned.killed; if (!parsed.opts.reject) { return err; } throw err; } return { stdout: handleOutput(parsed.opts, result.stdout), stderr: handleOutput(parsed.opts, result.stderr), code: 0, failed: false, killed: false, signal: null, cmd: joinedCmd, timedOut: false }; }), destroy); crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); handleInput(spawned, parsed.opts.input); spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); spawned.catch = onrejected => handlePromise().catch(onrejected); return spawned; }; // TODO: set `stderr: 'ignore'` when that option is implemented module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); // TODO: set `stdout: 'ignore'` when that option is implemented module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); module.exports.sync = (cmd, args, opts) => { const parsed = handleArgs(cmd, args, opts); const joinedCmd = joinCmd(cmd, args); if (isStream(parsed.opts.input)) { throw new TypeError('The `input` option cannot be a stream in sync mode'); } const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); result.code = result.status; if (result.error || result.status !== 0 || result.signal !== null) { const err = makeError(result, { joinedCmd, parsed }); if (!parsed.opts.reject) { return err; } throw err; } return { stdout: handleOutput(parsed.opts, result.stdout), stderr: handleOutput(parsed.opts, result.stderr), code: 0, failed: false, signal: null, cmd: joinedCmd, timedOut: false }; }; module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); /***/ }), /***/ 2160: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Older verions of Node.js might not have `util.getSystemErrorName()`. // In that case, fall back to a deprecated internal. const util = __nccwpck_require__(1669); let uv; if (typeof util.getSystemErrorName === 'function') { module.exports = util.getSystemErrorName; } else { try { uv = process.binding('uv'); if (typeof uv.errname !== 'function') { throw new TypeError('uv.errname is not a function'); } } catch (err) { console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); uv = null; } module.exports = code => errname(uv, code); } // Used for testing the fallback behavior module.exports.__test__ = errname; function errname(uv, code) { if (uv) { return uv.errname(code); } if (!(code < 0)) { throw new Error('err >= 0'); } return `Unknown system error ${code}`; } /***/ }), /***/ 7023: /***/ ((module) => { "use strict"; const alias = ['stdin', 'stdout', 'stderr']; const hasAlias = opts => alias.some(x => Boolean(opts[x])); module.exports = opts => { if (!opts) { return null; } if (opts.stdio && hasAlias(opts)) { throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); } if (typeof opts.stdio === 'string') { return opts.stdio; } const stdio = opts.stdio || []; if (!Array.isArray(stdio)) { throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); } const result = []; const len = Math.max(stdio.length, alias.length); for (let i = 0; i < len; i++) { let value = null; if (stdio[i] !== undefined) { value = stdio[i]; } else if (opts[alias[i]] !== undefined) { value = opts[alias[i]]; } result[i] = value; } return result; }; /***/ }), /***/ 9286: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {PassThrough} = __nccwpck_require__(2413); module.exports = options => { options = Object.assign({}, options); const {array} = options; let {encoding} = options; const buffer = encoding === 'buffer'; let objectMode = false; if (array) { objectMode = !(encoding || buffer); } else { encoding = encoding || 'utf8'; } if (buffer) { encoding = null; } let len = 0; const ret = []; const stream = new PassThrough({objectMode}); if (encoding) { stream.setEncoding(encoding); } stream.on('data', chunk => { ret.push(chunk); if (objectMode) { len = ret.length; } else { len += chunk.length; } }); stream.getBufferedValue = () => { if (array) { return ret; } return buffer ? Buffer.concat(ret, len) : ret.join(''); }; stream.getBufferedLength = () => len; return stream; }; /***/ }), /***/ 2560: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const pump = __nccwpck_require__(8341); const bufferStream = __nccwpck_require__(9286); class MaxBufferError extends Error { constructor() { super('maxBuffer exceeded'); this.name = 'MaxBufferError'; } } function getStream(inputStream, options) { if (!inputStream) { return Promise.reject(new Error('Expected a stream')); } options = Object.assign({maxBuffer: Infinity}, options); const {maxBuffer} = options; let stream; return new Promise((resolve, reject) => { const rejectPromise = error => { if (error) { // A null check error.bufferedData = stream.getBufferedValue(); } reject(error); }; stream = pump(inputStream, bufferStream(options), error => { if (error) { rejectPromise(error); return; } resolve(); }); stream.on('data', () => { if (stream.getBufferedLength() > maxBuffer) { rejectPromise(new MaxBufferError()); } }); }).then(() => stream.getBufferedValue()); } module.exports = getStream; module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); module.exports.MaxBufferError = MaxBufferError; /***/ }), /***/ 2597: /***/ ((module) => { "use strict"; var isStream = module.exports = function (stream) { return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; }; isStream.writable = function (stream) { return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; }; isStream.readable = function (stream) { return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; }; isStream.duplex = function (stream) { return isStream.writable(stream) && isStream.readable(stream); }; isStream.transform = function (stream) { return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; }; /***/ }), /***/ 2509: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(5622); const pathKey = __nccwpck_require__(7299); module.exports = opts => { opts = Object.assign({ cwd: process.cwd(), path: process.env[pathKey()] }, opts); let prev; let pth = path.resolve(opts.cwd); const ret = []; while (prev !== pth) { ret.push(path.join(pth, 'node_modules/.bin')); prev = pth; pth = path.resolve(pth, '..'); } // ensure the running `node` binary is used ret.push(path.dirname(process.execPath)); return ret.concat(opts.path).join(path.delimiter); }; module.exports.env = opts => { opts = Object.assign({ env: process.env }, opts); const env = Object.assign({}, opts.env); const path = pathKey({env}); opts.path = env[path]; env[path] = module.exports(opts); return env; }; /***/ }), /***/ 7299: /***/ ((module) => { "use strict"; module.exports = opts => { opts = opts || {}; const env = opts.env || process.env; const platform = opts.platform || process.platform; if (platform !== 'win32') { return 'PATH'; } return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; }; /***/ }), /***/ 9317: /***/ ((module, exports) => { exports = module.exports = SemVer var debug /* istanbul ignore next */ if (typeof process === 'object' && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { debug = function () { var args = Array.prototype.slice.call(arguments, 0) args.unshift('SEMVER') console.log.apply(console, args) } } else { debug = function () {} } // Note: this is the semver.org version of the spec that it implements // Not necessarily the package version of this code. exports.SEMVER_SPEC_VERSION = '2.0.0' var MAX_LENGTH = 256 var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ 9007199254740991 // Max safe segment length for coercion. var MAX_SAFE_COMPONENT_LENGTH = 16 // The actual regexps go on exports.re var re = exports.re = [] var src = exports.src = [] var R = 0 // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. // ## Numeric Identifier // A single `0`, or a non-zero digit followed by zero or more digits. var NUMERICIDENTIFIER = R++ src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' var NUMERICIDENTIFIERLOOSE = R++ src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. var NONNUMERICIDENTIFIER = R++ src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' // ## Main Version // Three dot-separated numeric identifiers. var MAINVERSION = R++ src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + '(' + src[NUMERICIDENTIFIER] + ')\\.' + '(' + src[NUMERICIDENTIFIER] + ')' var MAINVERSIONLOOSE = R++ src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' // ## Pre-release Version Identifier // A numeric identifier, or a non-numeric identifier. var PRERELEASEIDENTIFIER = R++ src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + '|' + src[NONNUMERICIDENTIFIER] + ')' var PRERELEASEIDENTIFIERLOOSE = R++ src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + '|' + src[NONNUMERICIDENTIFIER] + ')' // ## Pre-release Version // Hyphen, followed by one or more dot-separated pre-release version // identifiers. var PRERELEASE = R++ src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' var PRERELEASELOOSE = R++ src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' // ## Build Metadata Identifier // Any combination of digits, letters, or hyphens. var BUILDIDENTIFIER = R++ src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata // identifiers. var BUILD = R++ src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' // ## Full Version String // A main version, followed optionally by a pre-release version and // build metadata. // Note that the only major, minor, patch, and pre-release sections of // the version string are capturing groups. The build metadata is not a // capturing group, because it should not ever be used in version // comparison. var FULL = R++ var FULLPLAIN = 'v?' + src[MAINVERSION] + src[PRERELEASE] + '?' + src[BUILD] + '?' src[FULL] = '^' + FULLPLAIN + '$' // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. // also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty // common in the npm registry. var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + src[PRERELEASELOOSE] + '?' + src[BUILD] + '?' var LOOSE = R++ src[LOOSE] = '^' + LOOSEPLAIN + '$' var GTLT = R++ src[GTLT] = '((?:<|>)?=?)' // Something like "2.*" or "1.2.x". // Note that "x.x" is a valid xRange identifer, meaning "any version" // Only the first item is strictly required. var XRANGEIDENTIFIERLOOSE = R++ src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' var XRANGEIDENTIFIER = R++ src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' var XRANGEPLAIN = R++ src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + '(?:' + src[PRERELEASE] + ')?' + src[BUILD] + '?' + ')?)?' var XRANGEPLAINLOOSE = R++ src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:' + src[PRERELEASELOOSE] + ')?' + src[BUILD] + '?' + ')?)?' var XRANGE = R++ src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' var XRANGELOOSE = R++ src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' // Coercion. // Extract anything that could conceivably be a part of a valid semver var COERCE = R++ src[COERCE] = '(?:^|[^\\d])' + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:$|[^\\d])' // Tilde ranges. // Meaning is "reasonably at or greater than" var LONETILDE = R++ src[LONETILDE] = '(?:~>?)' var TILDETRIM = R++ src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') var tildeTrimReplace = '$1~' var TILDE = R++ src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' var TILDELOOSE = R++ src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' // Caret ranges. // Meaning is "at least and backwards compatible with" var LONECARET = R++ src[LONECARET] = '(?:\\^)' var CARETTRIM = R++ src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') var caretTrimReplace = '$1^' var CARET = R++ src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' var CARETLOOSE = R++ src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' // A simple gt/lt/eq thing, or just "" to indicate "any version" var COMPARATORLOOSE = R++ src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' var COMPARATOR = R++ src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' // An expression to strip any whitespace between the gtlt and the thing // it modifies, so that `> 1.2.3` ==> `>1.2.3` var COMPARATORTRIM = R++ src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' // this one has to use the /g flag re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` // Note that these all use the loose form, because they'll be // checked against either the strict or loose comparator form // later. var HYPHENRANGE = R++ src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + '\\s+-\\s+' + '(' + src[XRANGEPLAIN] + ')' + '\\s*$' var HYPHENRANGELOOSE = R++ src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + '\\s+-\\s+' + '(' + src[XRANGEPLAINLOOSE] + ')' + '\\s*$' // Star ranges basically just allow anything at all. var STAR = R++ src[STAR] = '(<|>)?=?\\s*\\*' // Compile to actual regexp objects. // All are flag-free, unless they were created above with a flag. for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) } } exports.parse = parse function parse (version, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (version instanceof SemVer) { return version } if (typeof version !== 'string') { return null } if (version.length > MAX_LENGTH) { return null } var r = options.loose ? re[LOOSE] : re[FULL] if (!r.test(version)) { return null } try { return new SemVer(version, options) } catch (er) { return null } } exports.valid = valid function valid (version, options) { var v = parse(version, options) return v ? v.version : null } exports.clean = clean function clean (version, options) { var s = parse(version.trim().replace(/^[=v]+/, ''), options) return s ? s.version : null } exports.SemVer = SemVer function SemVer (version, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (version instanceof SemVer) { if (version.loose === options.loose) { return version } else { version = version.version } } else if (typeof version !== 'string') { throw new TypeError('Invalid Version: ' + version) } if (version.length > MAX_LENGTH) { throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') } if (!(this instanceof SemVer)) { return new SemVer(version, options) } debug('SemVer', version, options) this.options = options this.loose = !!options.loose var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) } this.raw = version // these are actually numbers this.major = +m[1] this.minor = +m[2] this.patch = +m[3] if (this.major > MAX_SAFE_INTEGER || this.major < 0) { throw new TypeError('Invalid major version') } if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { throw new TypeError('Invalid minor version') } if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { throw new TypeError('Invalid patch version') } // numberify any prerelease numeric ids if (!m[4]) { this.prerelease = [] } else { this.prerelease = m[4].split('.').map(function (id) { if (/^[0-9]+$/.test(id)) { var num = +id if (num >= 0 && num < MAX_SAFE_INTEGER) { return num } } return id }) } this.build = m[5] ? m[5].split('.') : [] this.format() } SemVer.prototype.format = function () { this.version = this.major + '.' + this.minor + '.' + this.patch if (this.prerelease.length) { this.version += '-' + this.prerelease.join('.') } return this.version } SemVer.prototype.toString = function () { return this.version } SemVer.prototype.compare = function (other) { debug('SemVer.compare', this.version, this.options, other) if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } return this.compareMain(other) || this.comparePre(other) } SemVer.prototype.compareMain = function (other) { if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch) } SemVer.prototype.comparePre = function (other) { if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } // NOT having a prerelease is > having one if (this.prerelease.length && !other.prerelease.length) { return -1 } else if (!this.prerelease.length && other.prerelease.length) { return 1 } else if (!this.prerelease.length && !other.prerelease.length) { return 0 } var i = 0 do { var a = this.prerelease[i] var b = other.prerelease[i] debug('prerelease compare', i, a, b) if (a === undefined && b === undefined) { return 0 } else if (b === undefined) { return 1 } else if (a === undefined) { return -1 } else if (a === b) { continue } else { return compareIdentifiers(a, b) } } while (++i) } // preminor will bump the version up to the next minor release, and immediately // down to pre-release. premajor and prepatch work the same way. SemVer.prototype.inc = function (release, identifier) { switch (release) { case 'premajor': this.prerelease.length = 0 this.patch = 0 this.minor = 0 this.major++ this.inc('pre', identifier) break case 'preminor': this.prerelease.length = 0 this.patch = 0 this.minor++ this.inc('pre', identifier) break case 'prepatch': // If this is already a prerelease, it will bump to the next version // drop any prereleases that might already exist, since they are not // relevant at this point. this.prerelease.length = 0 this.inc('patch', identifier) this.inc('pre', identifier) break // If the input is a non-prerelease version, this acts the same as // prepatch. case 'prerelease': if (this.prerelease.length === 0) { this.inc('patch', identifier) } this.inc('pre', identifier) break case 'major': // If this is a pre-major version, bump up to the same major version. // Otherwise increment major. // 1.0.0-5 bumps to 1.0.0 // 1.1.0 bumps to 2.0.0 if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { this.major++ } this.minor = 0 this.patch = 0 this.prerelease = [] break case 'minor': // If this is a pre-minor version, bump up to the same minor version. // Otherwise increment minor. // 1.2.0-5 bumps to 1.2.0 // 1.2.1 bumps to 1.3.0 if (this.patch !== 0 || this.prerelease.length === 0) { this.minor++ } this.patch = 0 this.prerelease = [] break case 'patch': // If this is not a pre-release version, it will increment the patch. // If it is a pre-release it will bump up to the same patch version. // 1.2.0-5 patches to 1.2.0 // 1.2.0 patches to 1.2.1 if (this.prerelease.length === 0) { this.patch++ } this.prerelease = [] break // This probably shouldn't be used publicly. // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. case 'pre': if (this.prerelease.length === 0) { this.prerelease = [0] } else { var i = this.prerelease.length while (--i >= 0) { if (typeof this.prerelease[i] === 'number') { this.prerelease[i]++ i = -2 } } if (i === -1) { // didn't increment anything this.prerelease.push(0) } } if (identifier) { // 1.2.0-beta.1 bumps to 1.2.0-beta.2, // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 if (this.prerelease[0] === identifier) { if (isNaN(this.prerelease[1])) { this.prerelease = [identifier, 0] } } else { this.prerelease = [identifier, 0] } } break default: throw new Error('invalid increment argument: ' + release) } this.format() this.raw = this.version return this } exports.inc = inc function inc (version, release, loose, identifier) { if (typeof (loose) === 'string') { identifier = loose loose = undefined } try { return new SemVer(version, loose).inc(release, identifier).version } catch (er) { return null } } exports.diff = diff function diff (version1, version2) { if (eq(version1, version2)) { return null } else { var v1 = parse(version1) var v2 = parse(version2) var prefix = '' if (v1.prerelease.length || v2.prerelease.length) { prefix = 'pre' var defaultResult = 'prerelease' } for (var key in v1) { if (key === 'major' || key === 'minor' || key === 'patch') { if (v1[key] !== v2[key]) { return prefix + key } } } return defaultResult // may be undefined } } exports.compareIdentifiers = compareIdentifiers var numeric = /^[0-9]+$/ function compareIdentifiers (a, b) { var anum = numeric.test(a) var bnum = numeric.test(b) if (anum && bnum) { a = +a b = +b } return a === b ? 0 : (anum && !bnum) ? -1 : (bnum && !anum) ? 1 : a < b ? -1 : 1 } exports.rcompareIdentifiers = rcompareIdentifiers function rcompareIdentifiers (a, b) { return compareIdentifiers(b, a) } exports.major = major function major (a, loose) { return new SemVer(a, loose).major } exports.minor = minor function minor (a, loose) { return new SemVer(a, loose).minor } exports.patch = patch function patch (a, loose) { return new SemVer(a, loose).patch } exports.compare = compare function compare (a, b, loose) { return new SemVer(a, loose).compare(new SemVer(b, loose)) } exports.compareLoose = compareLoose function compareLoose (a, b) { return compare(a, b, true) } exports.rcompare = rcompare function rcompare (a, b, loose) { return compare(b, a, loose) } exports.sort = sort function sort (list, loose) { return list.sort(function (a, b) { return exports.compare(a, b, loose) }) } exports.rsort = rsort function rsort (list, loose) { return list.sort(function (a, b) { return exports.rcompare(a, b, loose) }) } exports.gt = gt function gt (a, b, loose) { return compare(a, b, loose) > 0 } exports.lt = lt function lt (a, b, loose) { return compare(a, b, loose) < 0 } exports.eq = eq function eq (a, b, loose) { return compare(a, b, loose) === 0 } exports.neq = neq function neq (a, b, loose) { return compare(a, b, loose) !== 0 } exports.gte = gte function gte (a, b, loose) { return compare(a, b, loose) >= 0 } exports.lte = lte function lte (a, b, loose) { return compare(a, b, loose) <= 0 } exports.cmp = cmp function cmp (a, op, b, loose) { switch (op) { case '===': if (typeof a === 'object') a = a.version if (typeof b === 'object') b = b.version return a === b case '!==': if (typeof a === 'object') a = a.version if (typeof b === 'object') b = b.version return a !== b case '': case '=': case '==': return eq(a, b, loose) case '!=': return neq(a, b, loose) case '>': return gt(a, b, loose) case '>=': return gte(a, b, loose) case '<': return lt(a, b, loose) case '<=': return lte(a, b, loose) default: throw new TypeError('Invalid operator: ' + op) } } exports.Comparator = Comparator function Comparator (comp, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (comp instanceof Comparator) { if (comp.loose === !!options.loose) { return comp } else { comp = comp.value } } if (!(this instanceof Comparator)) { return new Comparator(comp, options) } debug('comparator', comp, options) this.options = options this.loose = !!options.loose this.parse(comp) if (this.semver === ANY) { this.value = '' } else { this.value = this.operator + this.semver.version } debug('comp', this) } var ANY = {} Comparator.prototype.parse = function (comp) { var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] var m = comp.match(r) if (!m) { throw new TypeError('Invalid comparator: ' + comp) } this.operator = m[1] if (this.operator === '=') { this.operator = '' } // if it literally is just '>' or '' then allow anything. if (!m[2]) { this.semver = ANY } else { this.semver = new SemVer(m[2], this.options.loose) } } Comparator.prototype.toString = function () { return this.value } Comparator.prototype.test = function (version) { debug('Comparator.test', version, this.options.loose) if (this.semver === ANY) { return true } if (typeof version === 'string') { version = new SemVer(version, this.options) } return cmp(version, this.operator, this.semver, this.options) } Comparator.prototype.intersects = function (comp, options) { if (!(comp instanceof Comparator)) { throw new TypeError('a Comparator is required') } if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } var rangeTmp if (this.operator === '') { rangeTmp = new Range(comp.value, options) return satisfies(this.value, rangeTmp, options) } else if (comp.operator === '') { rangeTmp = new Range(this.value, options) return satisfies(comp.semver, rangeTmp, options) } var sameDirectionIncreasing = (this.operator === '>=' || this.operator === '>') && (comp.operator === '>=' || comp.operator === '>') var sameDirectionDecreasing = (this.operator === '<=' || this.operator === '<') && (comp.operator === '<=' || comp.operator === '<') var sameSemVer = this.semver.version === comp.semver.version var differentDirectionsInclusive = (this.operator === '>=' || this.operator === '<=') && (comp.operator === '>=' || comp.operator === '<=') var oppositeDirectionsLessThan = cmp(this.semver, '<', comp.semver, options) && ((this.operator === '>=' || this.operator === '>') && (comp.operator === '<=' || comp.operator === '<')) var oppositeDirectionsGreaterThan = cmp(this.semver, '>', comp.semver, options) && ((this.operator === '<=' || this.operator === '<') && (comp.operator === '>=' || comp.operator === '>')) return sameDirectionIncreasing || sameDirectionDecreasing || (sameSemVer && differentDirectionsInclusive) || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan } exports.Range = Range function Range (range, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (range instanceof Range) { if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { return range } else { return new Range(range.raw, options) } } if (range instanceof Comparator) { return new Range(range.value, options) } if (!(this instanceof Range)) { return new Range(range, options) } this.options = options this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease // First, split based on boolean or || this.raw = range this.set = range.split(/\s*\|\|\s*/).map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason return c.length }) if (!this.set.length) { throw new TypeError('Invalid SemVer Range: ' + range) } this.format() } Range.prototype.format = function () { this.range = this.set.map(function (comps) { return comps.join(' ').trim() }).join('||').trim() return this.range } Range.prototype.toString = function () { return this.range } Range.prototype.parseRange = function (range) { var loose = this.options.loose range = range.trim() // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) debug('comparator trim', range, re[COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` range = range.replace(re[TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` range = range.replace(re[CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') // At this point, the range is completely trimmed and // ready to be split into comparators. var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) if (this.options.loose) { // in loose mode, throw out any that are not valid comparators set = set.filter(function (comp) { return !!comp.match(compRe) }) } set = set.map(function (comp) { return new Comparator(comp, this.options) }, this) return set } Range.prototype.intersects = function (range, options) { if (!(range instanceof Range)) { throw new TypeError('a Range is required') } return this.set.some(function (thisComparators) { return thisComparators.every(function (thisComparator) { return range.set.some(function (rangeComparators) { return rangeComparators.every(function (rangeComparator) { return thisComparator.intersects(rangeComparator, options) }) }) }) }) } // Mostly just for testing and legacy API reasons exports.toComparators = toComparators function toComparators (range, options) { return new Range(range, options).set.map(function (comp) { return comp.map(function (c) { return c.value }).join(' ').trim().split(' ') }) } // comprised of xranges, tildes, stars, and gtlt's at this point. // already replaced the hyphen ranges // turn into a set of JUST comparators. function parseComparator (comp, options) { debug('comp', comp, options) comp = replaceCarets(comp, options) debug('caret', comp) comp = replaceTildes(comp, options) debug('tildes', comp) comp = replaceXRanges(comp, options) debug('xrange', comp) comp = replaceStars(comp, options) debug('stars', comp) return comp } function isX (id) { return !id || id.toLowerCase() === 'x' || id === '*' } // ~, ~> --> * (any, kinda silly) // ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 // ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 function replaceTildes (comp, options) { return comp.trim().split(/\s+/).map(function (comp) { return replaceTilde(comp, options) }).join(' ') } function replaceTilde (comp, options) { var r = options.loose ? re[TILDELOOSE] : re[TILDE] return comp.replace(r, function (_, M, m, p, pr) { debug('tilde', comp, _, M, m, p, pr) var ret if (isX(M)) { ret = '' } else if (isX(m)) { ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' } else if (isX(p)) { // ~1.2 == >=1.2.0 <1.3.0 ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } else if (pr) { debug('replaceTilde pr', pr) ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + (+m + 1) + '.0' } else { // ~1.2.3 == >=1.2.3 <1.3.0 ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + (+m + 1) + '.0' } debug('tilde return', ret) return ret }) } // ^ --> * (any, kinda silly) // ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 // ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 // ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 // ^1.2.3 --> >=1.2.3 <2.0.0 // ^1.2.0 --> >=1.2.0 <2.0.0 function replaceCarets (comp, options) { return comp.trim().split(/\s+/).map(function (comp) { return replaceCaret(comp, options) }).join(' ') } function replaceCaret (comp, options) { debug('caret', comp, options) var r = options.loose ? re[CARETLOOSE] : re[CARET] return comp.replace(r, function (_, M, m, p, pr) { debug('caret', comp, _, M, m, p, pr) var ret if (isX(M)) { ret = '' } else if (isX(m)) { ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' } else if (isX(p)) { if (M === '0') { ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } else { ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' } } else if (pr) { debug('replaceCaret pr', pr) if (M === '0') { if (m === '0') { ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + m + '.' + (+p + 1) } else { ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + (+m + 1) + '.0' } } else { ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + (+M + 1) + '.0.0' } } else { debug('no pr') if (M === '0') { if (m === '0') { ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + m + '.' + (+p + 1) } else { ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + (+m + 1) + '.0' } } else { ret = '>=' + M + '.' + m + '.' + p + ' <' + (+M + 1) + '.0.0' } } debug('caret return', ret) return ret }) } function replaceXRanges (comp, options) { debug('replaceXRanges', comp, options) return comp.split(/\s+/).map(function (comp) { return replaceXRange(comp, options) }).join(' ') } function replaceXRange (comp, options) { comp = comp.trim() var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] return comp.replace(r, function (ret, gtlt, M, m, p, pr) { debug('xRange', comp, ret, gtlt, M, m, p, pr) var xM = isX(M) var xm = xM || isX(m) var xp = xm || isX(p) var anyX = xp if (gtlt === '=' && anyX) { gtlt = '' } if (xM) { if (gtlt === '>' || gtlt === '<') { // nothing is allowed ret = '<0.0.0' } else { // nothing is forbidden ret = '*' } } else if (gtlt && anyX) { // we know patch is an x, because we have any x at all. // replace X with 0 if (xm) { m = 0 } p = 0 if (gtlt === '>') { // >1 => >=2.0.0 // >1.2 => >=1.3.0 // >1.2.3 => >= 1.2.4 gtlt = '>=' if (xm) { M = +M + 1 m = 0 p = 0 } else { m = +m + 1 p = 0 } } else if (gtlt === '<=') { // <=0.7.x is actually <0.8.0, since any 0.7.x should // pass. Similarly, <=7.x is actually <8.0.0, etc. gtlt = '<' if (xm) { M = +M + 1 } else { m = +m + 1 } } ret = gtlt + M + '.' + m + '.' + p } else if (xm) { ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' } else if (xp) { ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } debug('xRange return', ret) return ret }) } // Because * is AND-ed with everything else in the comparator, // and '' means "any version", just remove the *s entirely. function replaceStars (comp, options) { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! return comp.trim().replace(re[STAR], '') } // This function is passed to string.replace(re[HYPHENRANGE]) // M, m, patch, prerelease, build // 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 // 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do // 1.2 - 3.4 => >=1.2.0 <3.5.0 function hyphenReplace ($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { if (isX(fM)) { from = '' } else if (isX(fm)) { from = '>=' + fM + '.0.0' } else if (isX(fp)) { from = '>=' + fM + '.' + fm + '.0' } else { from = '>=' + from } if (isX(tM)) { to = '' } else if (isX(tm)) { to = '<' + (+tM + 1) + '.0.0' } else if (isX(tp)) { to = '<' + tM + '.' + (+tm + 1) + '.0' } else if (tpr) { to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr } else { to = '<=' + to } return (from + ' ' + to).trim() } // if ANY of the sets match ALL of its comparators, then pass Range.prototype.test = function (version) { if (!version) { return false } if (typeof version === 'string') { version = new SemVer(version, this.options) } for (var i = 0; i < this.set.length; i++) { if (testSet(this.set[i], version, this.options)) { return true } } return false } function testSet (set, version, options) { for (var i = 0; i < set.length; i++) { if (!set[i].test(version)) { return false } } if (version.prerelease.length && !options.includePrerelease) { // Find the set of versions that are allowed to have prereleases // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 // That should allow `1.2.3-pr.2` to pass. // However, `1.2.4-alpha.notready` should NOT be allowed, // even though it's within the range set by the comparators. for (i = 0; i < set.length; i++) { debug(set[i].semver) if (set[i].semver === ANY) { continue } if (set[i].semver.prerelease.length > 0) { var allowed = set[i].semver if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) { return true } } } // Version has a -pre, but it's not one of the ones we like. return false } return true } exports.satisfies = satisfies function satisfies (version, range, options) { try { range = new Range(range, options) } catch (er) { return false } return range.test(version) } exports.maxSatisfying = maxSatisfying function maxSatisfying (versions, range, options) { var max = null var maxSV = null try { var rangeObj = new Range(range, options) } catch (er) { return null } versions.forEach(function (v) { if (rangeObj.test(v)) { // satisfies(v, range, options) if (!max || maxSV.compare(v) === -1) { // compare(max, v, true) max = v maxSV = new SemVer(max, options) } } }) return max } exports.minSatisfying = minSatisfying function minSatisfying (versions, range, options) { var min = null var minSV = null try { var rangeObj = new Range(range, options) } catch (er) { return null } versions.forEach(function (v) { if (rangeObj.test(v)) { // satisfies(v, range, options) if (!min || minSV.compare(v) === 1) { // compare(min, v, true) min = v minSV = new SemVer(min, options) } } }) return min } exports.minVersion = minVersion function minVersion (range, loose) { range = new Range(range, loose) var minver = new SemVer('0.0.0') if (range.test(minver)) { return minver } minver = new SemVer('0.0.0-0') if (range.test(minver)) { return minver } minver = null for (var i = 0; i < range.set.length; ++i) { var comparators = range.set[i] comparators.forEach(function (comparator) { // Clone to avoid manipulating the comparator's semver object. var compver = new SemVer(comparator.semver.version) switch (comparator.operator) { case '>': if (compver.prerelease.length === 0) { compver.patch++ } else { compver.prerelease.push(0) } compver.raw = compver.format() /* fallthrough */ case '': case '>=': if (!minver || gt(minver, compver)) { minver = compver } break case '<': case '<=': /* Ignore maximum versions */ break /* istanbul ignore next */ default: throw new Error('Unexpected operation: ' + comparator.operator) } }) } if (minver && range.test(minver)) { return minver } return null } exports.validRange = validRange function validRange (range, options) { try { // Return '*' instead of '' so that truthiness works. // This will throw if it's invalid anyway return new Range(range, options).range || '*' } catch (er) { return null } } // Determine if version is less than all the versions possible in the range exports.ltr = ltr function ltr (version, range, options) { return outside(version, range, '<', options) } // Determine if version is greater than all the versions possible in the range. exports.gtr = gtr function gtr (version, range, options) { return outside(version, range, '>', options) } exports.outside = outside function outside (version, range, hilo, options) { version = new SemVer(version, options) range = new Range(range, options) var gtfn, ltefn, ltfn, comp, ecomp switch (hilo) { case '>': gtfn = gt ltefn = lte ltfn = lt comp = '>' ecomp = '>=' break case '<': gtfn = lt ltefn = gte ltfn = gt comp = '<' ecomp = '<=' break default: throw new TypeError('Must provide a hilo val of "<" or ">"') } // If it satisifes the range it is not outside if (satisfies(version, range, options)) { return false } // From now on, variable terms are as if we're in "gtr" mode. // but note that everything is flipped for the "ltr" function. for (var i = 0; i < range.set.length; ++i) { var comparators = range.set[i] var high = null var low = null comparators.forEach(function (comparator) { if (comparator.semver === ANY) { comparator = new Comparator('>=0.0.0') } high = high || comparator low = low || comparator if (gtfn(comparator.semver, high.semver, options)) { high = comparator } else if (ltfn(comparator.semver, low.semver, options)) { low = comparator } }) // If the edge version comparator has a operator then our version // isn't outside it if (high.operator === comp || high.operator === ecomp) { return false } // If the lowest version comparator has an operator and our version // is less than it then it isn't higher than the range if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) { return false } else if (low.operator === ecomp && ltfn(version, low.semver)) { return false } } return true } exports.prerelease = prerelease function prerelease (version, options) { var parsed = parse(version, options) return (parsed && parsed.prerelease.length) ? parsed.prerelease : null } exports.intersects = intersects function intersects (r1, r2, options) { r1 = new Range(r1, options) r2 = new Range(r2, options) return r1.intersects(r2) } exports.coerce = coerce function coerce (version) { if (version instanceof SemVer) { return version } if (typeof version !== 'string') { return null } var match = version.match(re[COERCE]) if (match == null) { return null } return parse(match[1] + '.' + (match[2] || '0') + '.' + (match[3] || '0')) } /***/ }), /***/ 2116: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var shebangRegex = __nccwpck_require__(2998); module.exports = function (str) { var match = str.match(shebangRegex); if (!match) { return null; } var arr = match[0].replace(/#! ?/, '').split(' '); var bin = arr[0].split('/').pop(); var arg = arr[1]; return (bin === 'env' ? arg : bin + (arg ? ' ' + arg : '') ); }; /***/ }), /***/ 2998: /***/ ((module) => { "use strict"; module.exports = /^#!.*/; /***/ }), /***/ 3411: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = which which.sync = whichSync var isWindows = process.platform === 'win32' || process.env.OSTYPE === 'cygwin' || process.env.OSTYPE === 'msys' var path = __nccwpck_require__(5622) var COLON = isWindows ? ';' : ':' var isexe = __nccwpck_require__(7126) function getNotFoundError (cmd) { var er = new Error('not found: ' + cmd) er.code = 'ENOENT' return er } function getPathInfo (cmd, opt) { var colon = opt.colon || COLON var pathEnv = opt.path || process.env.PATH || '' var pathExt = [''] pathEnv = pathEnv.split(colon) var pathExtExe = '' if (isWindows) { pathEnv.unshift(process.cwd()) pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') pathExt = pathExtExe.split(colon) // Always test the cmd itself first. isexe will check to make sure // it's found in the pathExt set. if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') pathExt.unshift('') } // If it has a slash, then we don't bother searching the pathenv. // just check the file itself, and that's it. if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) pathEnv = [''] return { env: pathEnv, ext: pathExt, extExe: pathExtExe } } function which (cmd, opt, cb) { if (typeof opt === 'function') { cb = opt opt = {} } var info = getPathInfo(cmd, opt) var pathEnv = info.env var pathExt = info.ext var pathExtExe = info.extExe var found = [] ;(function F (i, l) { if (i === l) { if (opt.all && found.length) return cb(null, found) else return cb(getNotFoundError(cmd)) } var pathPart = pathEnv[i] if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') pathPart = pathPart.slice(1, -1) var p = path.join(pathPart, cmd) if (!pathPart && (/^\.[\\\/]/).test(cmd)) { p = cmd.slice(0, 2) + p } ;(function E (ii, ll) { if (ii === ll) return F(i + 1, l) var ext = pathExt[ii] isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { if (!er && is) { if (opt.all) found.push(p + ext) else return cb(null, p + ext) } return E(ii + 1, ll) }) })(0, pathExt.length) })(0, pathEnv.length) } function whichSync (cmd, opt) { opt = opt || {} var info = getPathInfo(cmd, opt) var pathEnv = info.env var pathExt = info.ext var pathExtExe = info.extExe var found = [] for (var i = 0, l = pathEnv.length; i < l; i ++) { var pathPart = pathEnv[i] if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') pathPart = pathPart.slice(1, -1) var p = path.join(pathPart, cmd) if (!pathPart && /^\.[\\\/]/.test(cmd)) { p = cmd.slice(0, 2) + p } for (var j = 0, ll = pathExt.length; j < ll; j ++) { var cur = p + pathExt[j] var is try { is = isexe.sync(cur, { pathExt: pathExtExe }) if (is) { if (opt.all) found.push(cur) else return cur } } catch (ex) {} } } if (opt.all && found.length) return found if (opt.nothrow) return null throw getNotFoundError(cmd) } /***/ }), /***/ 2940: /***/ ((module) => { // Returns a wrapper function that returns a wrapped callback // The wrapper function should do some stuff, and return a // presumably different callback function. // This makes sure that own properties are retained, so that // decorations and such are not lost along the way. module.exports = wrappy function wrappy (fn, cb) { if (fn && cb) return wrappy(fn)(cb) if (typeof fn !== 'function') throw new TypeError('need wrapper function') Object.keys(fn).forEach(function (k) { wrapper[k] = fn[k] }) return wrapper function wrapper() { var args = new Array(arguments.length) for (var i = 0; i < args.length; i++) { args[i] = arguments[i] } var ret = fn.apply(this, args) var cb = args[args.length-1] if (typeof ret === 'function' && ret !== cb) { Object.keys(cb).forEach(function (k) { ret[k] = cb[k] }) } return ret } } /***/ }), /***/ 4570: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const core = __nccwpck_require__(2186) const yaml = __nccwpck_require__(1917) const fs = __nccwpck_require__(5630) const path = __nccwpck_require__(5622) __nccwpck_require__(2437).config() const REPLACE_DEFAULT = true const getVar = ({ key, default: dft, required = false, type = 'string' }) => { const coreVar = core.getInput(key) const envVar = process.env[key] if (key === 'PR_LABELS' && (coreVar === false || envVar === 'false')) return undefined if (coreVar !== undefined && coreVar.length >= 1) { if (type === 'array') return coreVar.split('\n') if (type === 'boolean') return coreVar === 'false' ? false : Boolean(coreVar) return coreVar } if (envVar !== undefined && envVar.length >= 1) { if (type === 'array') return envVar.split(',') if (type === 'boolean') return envVar === 'true' return envVar } if (required === true) return core.setFailed(`Variable ${ key } missing.`) return dft } const context = { GITHUB_TOKEN: getVar({ key: 'GH_PAT', required: true }), GIT_EMAIL: getVar({ key: 'GIT_EMAIL' }), GIT_USERNAME: getVar({ key: 'GIT_USERNAME' }), CONFIG_PATH: getVar({ key: 'CONFIG_PATH', default: '.github/sync.yml' }), COMMIT_PREFIX: getVar({ key: 'COMMIT_PREFIX', default: '🔄' }), COMMIT_EACH_FILE: getVar({ key: 'COMMIT_EACH_FILE', type: 'boolean', default: true }), PR_LABELS: getVar({ key: 'PR_LABELS', default: [ 'sync' ], type: 'array' }), ASSIGNEES: getVar({ key: 'ASSIGNEES', type: 'array' }), TMP_DIR: getVar({ key: 'TMP_DIR', default: `tmp-${ Date.now().toString() }` }), DRY_RUN: getVar({ key: 'DRY_RUN', type: 'boolean', default: false }), SKIP_CLEANUP: getVar({ key: 'SKIP_CLEANUP', type: 'boolean', default: false }), OVERWRITE_EXISTING_PR: getVar({ key: 'OVERWRITE_EXISTING_PR', type: 'boolean', default: true }), GITHUB_REPOSITORY: getVar({ key: 'GITHUB_REPOSITORY', required: true }), SKIP_PR: getVar({ key: 'SKIP_PR', type: 'boolean', default: false }) } core.setSecret(context.GITHUB_TOKEN) core.debug(JSON.stringify(context, null, 2)) const parseRepoName = (fullRepo) => { let host = 'github.com' if (fullRepo.startsWith('http')) { const url = new URL(fullRepo) host = url.host fullRepo = url.pathname.replace(/^\/+/, '') // Remove leading slash core.info('Using custom host') } const user = fullRepo.split('/')[0] const name = fullRepo.split('/')[1].split('@')[0] const branch = fullRepo.split('/')[1].split('@')[1] || 'default' return { fullName: `${ host }/${ user }/${ name }`, host, user, name, branch } } const parseExclude = (text, src) => { if (text === undefined || typeof text !== 'string') return undefined const files = text.split('\n').filter((i) => i) return files.map((file) => path.join(src, file)) } const parseFiles = (files) => { return files.map((item) => { if (typeof item === 'string') { return { source: item, dest: item, replace: REPLACE_DEFAULT } } if (item.source !== undefined) { return { source: item.source, dest: item.dest !== undefined ? item.dest : item.source, replace: item.replace !== undefined ? item.replace : REPLACE_DEFAULT, exclude: parseExclude(item.exclude, item.source) } } core.warn('Warn: No source files specified') }) } const parseConfig = async () => { const fileContent = await fs.promises.readFile(context.CONFIG_PATH) const configObject = yaml.load(fileContent.toString()) const result = {} Object.keys(configObject).forEach((key) => { if (key === 'group') { const rawObject = configObject[key] const groups = Array.isArray(rawObject) ? rawObject : [ rawObject ] groups.forEach((group) => { const repos = typeof group.repos === 'string' ? group.repos.split('\n').filter((n) => n) : group.repos repos.forEach((name) => { const files = parseFiles(group.files) const repo = parseRepoName(name) if (result[repo.fullName] !== undefined) { result[repo.fullName].files.push(...files) return } result[repo.fullName] = { repo, files } }) }) } else { const files = parseFiles(configObject[key]) const repo = parseRepoName(key) if (result[repo.fullName] !== undefined) { result[repo.fullName].files.push(...files) return } result[repo.fullName] = { repo, files } } }) return Object.values(result) } while (fs.existsSync(context.TMP_DIR)) { context.TMP_DIR = `tmp-${ Date.now().toString() }` core.warn(`TEMP_DIR already exists. Using "${ context.TMP_DIR }" now.`) } module.exports = { ...context, parseConfig } /***/ }), /***/ 109: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const { parse } = __nccwpck_require__(1150) const core = __nccwpck_require__(2186) const path = __nccwpck_require__(5622) const { GITHUB_TOKEN, GIT_USERNAME, GIT_EMAIL, TMP_DIR, COMMIT_PREFIX, GITHUB_REPOSITORY, OVERWRITE_EXISTING_PR } = __nccwpck_require__(4570) const { dedent, execCmd } = __nccwpck_require__(8505) const init = (repo) => { let github let baseBranch let prBranch let existingPr const workingDir = path.join(TMP_DIR, repo.fullName) const gitUrl = `https://${ GITHUB_TOKEN }@${ repo.fullName }.git` const clone = () => { core.debug(`Cloning ${ repo.fullName } into ${ workingDir }`) return execCmd( `git clone --depth 1 ${ repo.branch !== 'default' ? '--branch "' + repo.branch + '"' : '' } ${ gitUrl } ${ workingDir }` ) } const setIdentity = async (client) => { let username = GIT_USERNAME let email = GIT_EMAIL github = client if (email === undefined) { const { data } = await github.users.getAuthenticated() email = data.email username = data.login } core.debug(`Setting git user to email: ${ email }, username: ${ username }`) return execCmd( `git config --local user.name "${ username }" && git config --local user.email "${ email }"`, workingDir ) } const getBaseBranch = async () => { baseBranch = await execCmd( `git rev-parse --abbrev-ref HEAD`, workingDir ) } const createPrBranch = async () => { let newBranch = `repo-sync/${ GITHUB_REPOSITORY.split('/')[1] }/${ repo.branch }` if (OVERWRITE_EXISTING_PR === false) { newBranch += `-${ Math.round((new Date()).getTime() / 1000) }` } core.debug(`Creating PR Branch ${ newBranch }`) await execCmd( `git checkout -b "${ newBranch }"`, workingDir ) prBranch = newBranch } const add = async (file) => { return execCmd( `git add -f ${ file }`, workingDir ) } const hasChanges = async () => { const statusOutput = await execCmd( `git status --porcelain`, workingDir ) return parse(statusOutput).length !== 0 } const commit = async (msg) => { const message = msg !== undefined ? msg : `${ COMMIT_PREFIX } Synced file(s) with ${ GITHUB_REPOSITORY }` return execCmd( `git commit -m "${ message }"`, workingDir ) } const status = async () => { return execCmd( `git status`, workingDir ) } const push = async () => { return execCmd( `git push ${ gitUrl } --force`, workingDir ) } const findExistingPr = async () => { const { data } = await github.pulls.list({ owner: repo.user, repo: repo.name, state: 'open', head: `${ repo.user }:${ prBranch }` }) existingPr = data[0] return existingPr } const setPrWarning = async () => { await github.pulls.update({ owner: repo.user, repo: repo.name, pull_number: existingPr.number, body: dedent(` ⚠️ This PR is being automatically resynced ⚠️ ${ existingPr.body } `) }) } const removePrWarning = async () => { await github.pulls.update({ owner: repo.user, repo: repo.name, pull_number: existingPr.number, body: existingPr.body.replace('⚠️ This PR is being automatically resynced ⚠️', '') }) } const createOrUpdatePr = async (changedFiles) => { const body = dedent(` Synced local file(s) with [${ GITHUB_REPOSITORY }](https://github.com/${ GITHUB_REPOSITORY }). ${ changedFiles } --- This PR was created automatically by the [repo-file-sync-action](https://github.com/BetaHuhn/repo-file-sync-action) workflow run [#${ process.env.GITHUB_RUN_ID || 0 }](https://github.com/${ GITHUB_REPOSITORY }/actions/runs/${ process.env.GITHUB_RUN_ID || 0 }) `) if (existingPr) { core.info(`Overwriting existing PR`) const { data } = await github.pulls.update({ owner: repo.user, repo: repo.name, pull_number: existingPr.number, body: body }) return data } core.info(`Creating new PR`) const { data } = await github.pulls.create({ owner: repo.user, repo: repo.name, title: `${ COMMIT_PREFIX } Synced file(s) with ${ GITHUB_REPOSITORY }`, body: body, head: prBranch, base: baseBranch }) return data } return { workingDir, clone, setIdentity, getBaseBranch, createPrBranch, add, hasChanges, commit, status, push, findExistingPr, setPrWarning, removePrWarning, createOrUpdatePr } } module.exports = { init } /***/ }), /***/ 8505: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const fs = __nccwpck_require__(5630) const { exec } = __nccwpck_require__(3129) const core = __nccwpck_require__(2186) // From https://github.com/toniov/p-iteration/blob/master/lib/static-methods.js - MIT © Antonio V const forEach = async (array, callback) => { for (let index = 0; index < array.length; index++) { // eslint-disable-next-line callback-return await callback(array[index], index, array) } } // From https://github.com/MartinKolarik/dedent-js/blob/master/src/index.ts - MIT © 2015 Martin Kolárik const dedent = function(templateStrings, ...values) { const matches = [] const strings = typeof templateStrings === 'string' ? [ templateStrings ] : templateStrings.slice() strings[strings.length - 1] = strings[strings.length - 1].replace(/\r?\n([\t ]*)$/, '') for (let i = 0; i < strings.length; i++) { let match // eslint-disable-next-line no-cond-assign if (match = strings[i].match(/\n[\t ]+/g)) { matches.push(...match) } } if (matches.length) { const size = Math.min(...matches.map((value) => value.length - 1)) const pattern = new RegExp(`\n[\t ]{${ size }}`, 'g') for (let i = 0; i < strings.length; i++) { strings[i] = strings[i].replace(pattern, '\n') } } strings[0] = strings[0].replace(/^\r?\n/, '') let string = strings[0] for (let i = 0; i < values.length; i++) { string += values[i] + strings[i + 1] } return string } const execCmd = (command, workingDir) => { core.debug(`EXEC: "${ command }" IN ${ workingDir }`) return new Promise((resolve, reject) => { exec( command, { cwd: workingDir }, function(error, stdout) { error ? reject(error) : resolve(stdout.trim()) } ) }) } const addTrailingSlash = (str) => str.endsWith('/') ? str : str + '/' const pathIsDirectory = async (path) => { const stat = await fs.lstat(path) return stat.isDirectory() } const copy = async (src, dest, exclude) => { core.debug(`CP: ${ src } TO ${ dest }`) const filterFunc = (file) => { if (exclude.includes(file)) { core.debug(`Excluding file ${ file }`) return false } return true } return fs.copy(src, dest, (exclude !== undefined && { filter: filterFunc })) } const remove = async (src) => { core.debug(`RM: ${ src }`) return fs.remove(src) } module.exports = { forEach, dedent, addTrailingSlash, pathIsDirectory, execCmd, copy, remove } /***/ }), /***/ 4351: /***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { const core = __nccwpck_require__(2186) const github = __nccwpck_require__(5438) const fs = __nccwpck_require__(5747) const Git = __nccwpck_require__(109) const { forEach, dedent, addTrailingSlash, pathIsDirectory, copy, remove } = __nccwpck_require__(8505) const { parseConfig, GITHUB_TOKEN, COMMIT_EACH_FILE, COMMIT_PREFIX, PR_LABELS, ASSIGNEES, DRY_RUN, TMP_DIR, SKIP_CLEANUP, OVERWRITE_EXISTING_PR, SKIP_PR } = __nccwpck_require__(4570) const run = async () => { const client = new github.GitHub(GITHUB_TOKEN) const repos = await parseConfig() await forEach(repos, async (item) => { core.info(`Repository Info`) core.info(`Slug : ${ item.repo.name }`) core.info(`Owner : ${ item.repo.user }`) core.info(`Https Url : https://${ item.repo.fullName }`) core.info(`Branch : ${ item.repo.branch }`) core.info(' ') try { const git = Git.init(item.repo) // Clone and setup the git repository locally await git.clone() await git.setIdentity(client) await git.getBaseBranch() let existingPr if (SKIP_PR === false) { await git.createPrBranch() // Check for existing PR and add warning message that the PR maybe about to change existingPr = OVERWRITE_EXISTING_PR ? await git.findExistingPr() : undefined if (existingPr && DRY_RUN === false) { core.info(`Found existing PR ${ existingPr.number }`) await git.setPrWarning() } } core.info(`Locally syncing file(s) between source and target repository`) const modified = [] // Loop through all selected files of the source repo await forEach(item.files, async (file) => { const fileExists = fs.existsSync(file.source) if (fileExists === false) return core.warning(`Source ${ file.source } not found`) const localDestination = `${ git.workingDir }/${ file.dest }` const destExists = fs.existsSync(localDestination) if (destExists === true && file.replace === false) return core.warning(`File(s) already exist(s) in destination and 'replace' option is set to false`) const isDirectory = await pathIsDirectory(file.source) const source = isDirectory ? `${ addTrailingSlash(file.source) }` : file.source if (isDirectory) core.warning(`Source is directory`) await copy(source, localDestination, file.exclude) await git.add(file.dest) // Commit each file separately, if option is set to false commit all files at once later if (COMMIT_EACH_FILE === true) { const hasChanges = await git.hasChanges() if (hasChanges === false) return core.debug('File(s) already up to date') core.debug(`Creating commit for file(s) ${ file.dest }`) // Use different commit/pr message based on if the source is a directory or file const directory = isDirectory ? 'directory' : '' const otherFiles = isDirectory ? 'and copied all sub files/folders' : '' const message = { true: { commit: `${ COMMIT_PREFIX } Synced local '${ file.dest }' with remote '${ file.source }'`, pr: `Synced local ${ directory } ${ file.dest } with remote ${ directory } ${ file.source }` }, false: { commit: `${ COMMIT_PREFIX } Created local '${ file.dest }' from remote '${ file.source }'`, pr: `Created local ${ directory } ${ file.dest } ${ otherFiles } from remote ${ directory } ${ file.source }` } } // Commit and add file to modified array so we later know if there are any changes to actually push await git.commit(message[destExists].commit) modified.push({ dest: file.dest, source: file.source, message: message[destExists].pr }) } }) if (DRY_RUN) { core.warning('Dry run, no changes will be pushed') core.debug('Git Status:') core.debug(await git.status()) return } const hasChanges = await git.hasChanges() // If no changes left and nothing was modified we can assume nothing has changed/needs to be pushed if (hasChanges === false && modified.length < 1) { core.info('File(s) already up to date') if (existingPr) await git.removePrWarning() return } // If there are still local changes left (i.e. not committed each file separately), commit them before pushing if (hasChanges === true) { core.debug(`Creating commit for remaining files`) await git.commit() modified.push({ dest: git.workingDir }) } core.info(`Pushing changes to target repository`) await git.push() if (SKIP_PR === false) { // If each file was committed separately, list them in the PR description const changedFiles = dedent(`
Changed files
    ${ modified.map((file) => `
  • ${ file.message }
  • `).join('') }
`) const pullRequest = await git.createOrUpdatePr(COMMIT_EACH_FILE ? changedFiles : '') core.info(`Pull Request #${ pullRequest.number } created/updated: ${ pullRequest.html_url }`) core.setOutput('pull_request_number', pullRequest.number) core.setOutput('pull_request_url', pullRequest.html_url) if (PR_LABELS !== undefined && PR_LABELS.length > 0) { core.info(`Adding label(s) "${ PR_LABELS.join(', ') }" to PR`) await client.issues.addLabels({ owner: item.repo.user, repo: item.repo.name, issue_number: pullRequest.number, labels: PR_LABELS }) } if (ASSIGNEES !== undefined && ASSIGNEES.length > 0) { core.info(`Adding assignee(s) "${ ASSIGNEES.join(', ') }" to PR`) await client.issues.addAssignees({ owner: item.repo.user, repo: item.repo.name, issue_number: pullRequest.number, assignees: ASSIGNEES }) } } core.info(' ') } catch (err) { core.error(err.message) core.error(err) } }) if (SKIP_CLEANUP === true) { core.info('Skipping cleanup') return } await remove(TMP_DIR) core.info('Cleanup complete') } run() .then(() => {}) .catch((err) => { core.error('ERROR', err) core.setFailed(err.message) }) /***/ }), /***/ 2877: /***/ ((module) => { module.exports = eval("require")("encoding"); /***/ }), /***/ 2357: /***/ ((module) => { "use strict"; module.exports = require("assert");; /***/ }), /***/ 3129: /***/ ((module) => { "use strict"; module.exports = require("child_process");; /***/ }), /***/ 7619: /***/ ((module) => { "use strict"; module.exports = require("constants");; /***/ }), /***/ 8614: /***/ ((module) => { "use strict"; module.exports = require("events");; /***/ }), /***/ 5747: /***/ ((module) => { "use strict"; module.exports = require("fs");; /***/ }), /***/ 8605: /***/ ((module) => { "use strict"; module.exports = require("http");; /***/ }), /***/ 7211: /***/ ((module) => { "use strict"; module.exports = require("https");; /***/ }), /***/ 1631: /***/ ((module) => { "use strict"; module.exports = require("net");; /***/ }), /***/ 2087: /***/ ((module) => { "use strict"; module.exports = require("os");; /***/ }), /***/ 5622: /***/ ((module) => { "use strict"; module.exports = require("path");; /***/ }), /***/ 2413: /***/ ((module) => { "use strict"; module.exports = require("stream");; /***/ }), /***/ 4016: /***/ ((module) => { "use strict"; module.exports = require("tls");; /***/ }), /***/ 8835: /***/ ((module) => { "use strict"; module.exports = require("url");; /***/ }), /***/ 1669: /***/ ((module) => { "use strict"; module.exports = require("util");; /***/ }), /***/ 8761: /***/ ((module) => { "use strict"; module.exports = require("zlib");; /***/ }) /******/ }); /************************************************************************/ /******/ // The module cache /******/ var __webpack_module_cache__ = {}; /******/ /******/ // The require function /******/ function __nccwpck_require__(moduleId) { /******/ // Check if module is in cache /******/ if(__webpack_module_cache__[moduleId]) { /******/ return __webpack_module_cache__[moduleId].exports; /******/ } /******/ // Create a new module (and put it into the cache) /******/ var module = __webpack_module_cache__[moduleId] = { /******/ // no module.id needed /******/ // no module.loaded needed /******/ exports: {} /******/ }; /******/ /******/ // Execute the module function /******/ var threw = true; /******/ try { /******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); /******/ threw = false; /******/ } finally { /******/ if(threw) delete __webpack_module_cache__[moduleId]; /******/ } /******/ /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ /************************************************************************/ /******/ /* webpack/runtime/compat */ /******/ /******/ __nccwpck_require__.ab = __dirname + "/";/************************************************************************/ /******/ // module exports must be returned from runtime so entry inlining is disabled /******/ // startup /******/ // Load entry module and return exports /******/ return __nccwpck_require__(4351); /******/ })() ;