module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ "use strict";
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId, // module id
/******/ l: false, // "loaded" flag, set after the factory returns
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function.
/******/ // If the factory throws, evict the cache entry so a later
/******/ // require() retries instead of observing half-built exports.
/******/ var threw = true;
/******/ try {
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ threw = false;
/******/ } finally {
/******/ if(threw) delete installedModules[moduleId];
/******/ }
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // Absolute base path used by modules that reference bundled file assets
/******/ __webpack_require__.ab = __dirname + "/";
/******/
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(198);
/******/ };
/******/
/******/ // run startup
/******/ return startup();
/******/ })
| /************************************************************************/ |
| /******/ ({ |
| |
| /***/ 0: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| const { requestLog } = __webpack_require__(916); |
| const { |
| restEndpointMethods |
| } = __webpack_require__(842); |
| |
| const Core = __webpack_require__(529); |
| |
| const CORE_PLUGINS = [ |
| __webpack_require__(190), |
| __webpack_require__(19), // deprecated: remove in v17 |
| requestLog, |
| __webpack_require__(148), |
| restEndpointMethods, |
| __webpack_require__(430), |
| |
| __webpack_require__(850) // deprecated: remove in v17 |
| ]; |
| |
| const OctokitRest = Core.plugin(CORE_PLUGINS); |
| |
| function DeprecatedOctokit(options) { |
| const warn = |
| options && options.log && options.log.warn |
| ? options.log.warn |
| : console.warn; |
| warn( |
| '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. Use `const { Octokit } = require("@octokit/rest")` instead' |
| ); |
| return new OctokitRest(options); |
| } |
| |
| const Octokit = Object.assign(DeprecatedOctokit, { |
| Octokit: OctokitRest |
| }); |
| |
| Object.keys(OctokitRest).forEach(key => { |
| /* istanbul ignore else */ |
| if (OctokitRest.hasOwnProperty(key)) { |
| Octokit[key] = OctokitRest[key]; |
| } |
| }); |
| |
| module.exports = Octokit; |
| |
| |
| /***/ }), |
| |
| /***/ 1: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { |
| function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } |
| return new (P || (P = Promise))(function (resolve, reject) { |
| function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } |
| function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } |
| function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } |
| step((generator = generator.apply(thisArg, _arguments || [])).next()); |
| }); |
| }; |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const childProcess = __webpack_require__(129); |
| const path = __webpack_require__(622); |
| const util_1 = __webpack_require__(669); |
| const ioUtil = __webpack_require__(672); |
| const exec = util_1.promisify(childProcess.exec); |
| /** |
| * Copies a file or folder. |
| * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js |
| * |
| * @param source source path |
| * @param dest destination path |
| * @param options optional. See CopyOptions. |
| */ |
| function cp(source, dest, options = {}) { |
| return __awaiter(this, void 0, void 0, function* () { |
| const { force, recursive } = readCopyOptions(options); |
| const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; |
| // Dest is an existing file, but not forcing |
| if (destStat && destStat.isFile() && !force) { |
| return; |
| } |
| // If dest is an existing directory, should copy inside. |
| const newDest = destStat && destStat.isDirectory() |
| ? path.join(dest, path.basename(source)) |
| : dest; |
| if (!(yield ioUtil.exists(source))) { |
| throw new Error(`no such file or directory: ${source}`); |
| } |
| const sourceStat = yield ioUtil.stat(source); |
| if (sourceStat.isDirectory()) { |
| if (!recursive) { |
| throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); |
| } |
| else { |
| yield cpDirRecursive(source, newDest, 0, force); |
| } |
| } |
| else { |
| if (path.relative(source, newDest) === '') { |
| // a file cannot be copied to itself |
| throw new Error(`'${newDest}' and '${source}' are the same file`); |
| } |
| yield copyFile(source, newDest, force); |
| } |
| }); |
| } |
| exports.cp = cp; |
| /** |
| * Moves a path. |
| * |
| * @param source source path |
| * @param dest destination path |
| * @param options optional. See MoveOptions. |
| */ |
| function mv(source, dest, options = {}) { |
| return __awaiter(this, void 0, void 0, function* () { |
| if (yield ioUtil.exists(dest)) { |
| let destExists = true; |
| if (yield ioUtil.isDirectory(dest)) { |
| // If dest is directory copy src into dest |
| dest = path.join(dest, path.basename(source)); |
| destExists = yield ioUtil.exists(dest); |
| } |
| if (destExists) { |
| if (options.force == null || options.force) { |
| yield rmRF(dest); |
| } |
| else { |
| throw new Error('Destination already exists'); |
| } |
| } |
| } |
| yield mkdirP(path.dirname(dest)); |
| yield ioUtil.rename(source, dest); |
| }); |
| } |
| exports.mv = mv; |
| /** |
| * Remove a path recursively with force |
| * |
| * @param inputPath path to remove |
| */ |
| function rmRF(inputPath) { |
| return __awaiter(this, void 0, void 0, function* () { |
| if (ioUtil.IS_WINDOWS) { |
| // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another |
| // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. |
| try { |
| if (yield ioUtil.isDirectory(inputPath, true)) { |
| yield exec(`rd /s /q "${inputPath}"`); |
| } |
| else { |
| yield exec(`del /f /a "${inputPath}"`); |
| } |
| } |
| catch (err) { |
| // if you try to delete a file that doesn't exist, desired result is achieved |
| // other errors are valid |
| if (err.code !== 'ENOENT') |
| throw err; |
| } |
| // Shelling out fails to remove a symlink folder with missing source, this unlink catches that |
| try { |
| yield ioUtil.unlink(inputPath); |
| } |
| catch (err) { |
| // if you try to delete a file that doesn't exist, desired result is achieved |
| // other errors are valid |
| if (err.code !== 'ENOENT') |
| throw err; |
| } |
| } |
| else { |
| let isDir = false; |
| try { |
| isDir = yield ioUtil.isDirectory(inputPath); |
| } |
| catch (err) { |
| // if you try to delete a file that doesn't exist, desired result is achieved |
| // other errors are valid |
| if (err.code !== 'ENOENT') |
| throw err; |
| return; |
| } |
| if (isDir) { |
| yield exec(`rm -rf "${inputPath}"`); |
| } |
| else { |
| yield ioUtil.unlink(inputPath); |
| } |
| } |
| }); |
| } |
| exports.rmRF = rmRF; |
| /** |
| * Make a directory. Creates the full path with folders in between |
| * Will throw if it fails |
| * |
| * @param fsPath path to create |
| * @returns Promise<void> |
| */ |
| function mkdirP(fsPath) { |
| return __awaiter(this, void 0, void 0, function* () { |
| yield ioUtil.mkdirP(fsPath); |
| }); |
| } |
| exports.mkdirP = mkdirP; |
| /** |
| * Returns path of a tool had the tool actually been invoked. Resolves via paths. |
| * If you check and the tool does not exist, it will throw. |
| * |
| * @param tool name of the tool |
| * @param check whether to check if tool exists |
| * @returns Promise<string> path to tool |
| */ |
| function which(tool, check) { |
| return __awaiter(this, void 0, void 0, function* () { |
| if (!tool) { |
| throw new Error("parameter 'tool' is required"); |
| } |
| // recursive when check=true |
| if (check) { |
| const result = yield which(tool, false); |
| if (!result) { |
| if (ioUtil.IS_WINDOWS) { |
| throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); |
| } |
| else { |
| throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); |
| } |
| } |
| } |
| try { |
| // build the list of extensions to try |
| const extensions = []; |
| if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { |
| for (const extension of process.env.PATHEXT.split(path.delimiter)) { |
| if (extension) { |
| extensions.push(extension); |
| } |
| } |
| } |
| // if it's rooted, return it if exists. otherwise return empty. |
| if (ioUtil.isRooted(tool)) { |
| const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); |
| if (filePath) { |
| return filePath; |
| } |
| return ''; |
| } |
| // if any path separators, return empty |
| if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { |
| return ''; |
| } |
| // build the list of directories |
| // |
| // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, |
| // it feels like we should not do this. Checking the current directory seems like more of a use |
| // case of a shell, and the which() function exposed by the toolkit should strive for consistency |
| // across platforms. |
| const directories = []; |
| if (process.env.PATH) { |
| for (const p of process.env.PATH.split(path.delimiter)) { |
| if (p) { |
| directories.push(p); |
| } |
| } |
| } |
| // return the first match |
| for (const directory of directories) { |
| const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); |
| if (filePath) { |
| return filePath; |
| } |
| } |
| return ''; |
| } |
| catch (err) { |
| throw new Error(`which failed with message ${err.message}`); |
| } |
| }); |
| } |
| exports.which = which; |
| function readCopyOptions(options) { |
| const force = options.force == null ? true : options.force; |
| const recursive = Boolean(options.recursive); |
| return { force, recursive }; |
| } |
| function cpDirRecursive(sourceDir, destDir, currentDepth, force) { |
| return __awaiter(this, void 0, void 0, function* () { |
| // Ensure there is not a run away recursive copy |
| if (currentDepth >= 255) |
| return; |
| currentDepth++; |
| yield mkdirP(destDir); |
| const files = yield ioUtil.readdir(sourceDir); |
| for (const fileName of files) { |
| const srcFile = `${sourceDir}/${fileName}`; |
| const destFile = `${destDir}/${fileName}`; |
| const srcFileStat = yield ioUtil.lstat(srcFile); |
| if (srcFileStat.isDirectory()) { |
| // Recurse |
| yield cpDirRecursive(srcFile, destFile, currentDepth, force); |
| } |
| else { |
| yield copyFile(srcFile, destFile, force); |
| } |
| } |
| // Change the mode for the newly created directory |
| yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); |
| }); |
| } |
| // Buffered file copy |
| function copyFile(srcFile, destFile, force) { |
| return __awaiter(this, void 0, void 0, function* () { |
| if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { |
| // unlink/re-link it |
| try { |
| yield ioUtil.lstat(destFile); |
| yield ioUtil.unlink(destFile); |
| } |
| catch (e) { |
| // Try to override file permission |
| if (e.code === 'EPERM') { |
| yield ioUtil.chmod(destFile, '0666'); |
| yield ioUtil.unlink(destFile); |
| } |
| // other errors = it doesn't exist, no work to do |
| } |
| // Copy over symlink |
| const symlinkFull = yield ioUtil.readlink(srcFile); |
| yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); |
| } |
| else if (!(yield ioUtil.exists(destFile)) || force) { |
| yield ioUtil.copyFile(srcFile, destFile); |
| } |
| }); |
| } |
| //# sourceMappingURL=io.js.map |
| |
| /***/ }), |
| |
| /***/ 2: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const os = __webpack_require__(87); |
| const macosRelease = __webpack_require__(118); |
| const winRelease = __webpack_require__(49); |
| |
| const osName = (platform, release) => { |
| if (!platform && release) { |
| throw new Error('You can\'t specify a `release` without specifying `platform`'); |
| } |
| |
| platform = platform || os.platform(); |
| |
| let id; |
| |
| if (platform === 'darwin') { |
| if (!release && os.platform() === 'darwin') { |
| release = os.release(); |
| } |
| |
| const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; |
| id = release ? macosRelease(release).name : ''; |
| return prefix + (id ? ' ' + id : ''); |
| } |
| |
| if (platform === 'linux') { |
| if (!release && os.platform() === 'linux') { |
| release = os.release(); |
| } |
| |
| id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; |
| return 'Linux' + (id ? ' ' + id : ''); |
| } |
| |
| if (platform === 'win32') { |
| if (!release && os.platform() === 'win32') { |
| release = os.release(); |
| } |
| |
| id = release ? winRelease(release) : ''; |
| return 'Windows' + (id ? ' ' + id : ''); |
| } |
| |
| return platform; |
| }; |
| |
| module.exports = osName; |
| |
| |
| /***/ }), |
| |
| /***/ 9: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var once = __webpack_require__(969); |
| |
| var noop = function() {}; |
| |
| var isRequest = function(stream) { |
| return stream.setHeader && typeof stream.abort === 'function'; |
| }; |
| |
| var isChildProcess = function(stream) { |
| return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 |
| }; |
| |
| var eos = function(stream, opts, callback) { |
| if (typeof opts === 'function') return eos(stream, null, opts); |
| if (!opts) opts = {}; |
| |
| callback = once(callback || noop); |
| |
| var ws = stream._writableState; |
| var rs = stream._readableState; |
| var readable = opts.readable || (opts.readable !== false && stream.readable); |
| var writable = opts.writable || (opts.writable !== false && stream.writable); |
| var cancelled = false; |
| |
| var onlegacyfinish = function() { |
| if (!stream.writable) onfinish(); |
| }; |
| |
| var onfinish = function() { |
| writable = false; |
| if (!readable) callback.call(stream); |
| }; |
| |
| var onend = function() { |
| readable = false; |
| if (!writable) callback.call(stream); |
| }; |
| |
| var onexit = function(exitCode) { |
| callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); |
| }; |
| |
| var onerror = function(err) { |
| callback.call(stream, err); |
| }; |
| |
| var onclose = function() { |
| process.nextTick(onclosenexttick); |
| }; |
| |
| var onclosenexttick = function() { |
| if (cancelled) return; |
| if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); |
| if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); |
| }; |
| |
| var onrequest = function() { |
| stream.req.on('finish', onfinish); |
| }; |
| |
| if (isRequest(stream)) { |
| stream.on('complete', onfinish); |
| stream.on('abort', onclose); |
| if (stream.req) onrequest(); |
| else stream.on('request', onrequest); |
| } else if (writable && !ws) { // legacy streams |
| stream.on('end', onlegacyfinish); |
| stream.on('close', onlegacyfinish); |
| } |
| |
| if (isChildProcess(stream)) stream.on('exit', onexit); |
| |
| stream.on('end', onend); |
| stream.on('finish', onfinish); |
| if (opts.error !== false) stream.on('error', onerror); |
| stream.on('close', onclose); |
| |
| return function() { |
| cancelled = true; |
| stream.removeListener('complete', onfinish); |
| stream.removeListener('abort', onclose); |
| stream.removeListener('request', onrequest); |
| if (stream.req) stream.req.removeListener('finish', onfinish); |
| stream.removeListener('end', onlegacyfinish); |
| stream.removeListener('close', onlegacyfinish); |
| stream.removeListener('finish', onfinish); |
| stream.removeListener('exit', onexit); |
| stream.removeListener('end', onend); |
| stream.removeListener('error', onerror); |
| stream.removeListener('close', onclose); |
| }; |
| }; |
| |
| module.exports = eos; |
| |
| |
| /***/ }), |
| |
| /***/ 11: |
| /***/ (function(module) { |
| |
| // Returns a wrapper function that returns a wrapped callback |
| // The wrapper function should do some stuff, and return a |
| // presumably different callback function. |
| // This makes sure that own properties are retained, so that |
| // decorations and such are not lost along the way. |
| module.exports = wrappy |
| function wrappy (fn, cb) { |
| if (fn && cb) return wrappy(fn)(cb) |
| |
| if (typeof fn !== 'function') |
| throw new TypeError('need wrapper function') |
| |
| Object.keys(fn).forEach(function (k) { |
| wrapper[k] = fn[k] |
| }) |
| |
| return wrapper |
| |
| function wrapper() { |
| var args = new Array(arguments.length) |
| for (var i = 0; i < args.length; i++) { |
| args[i] = arguments[i] |
| } |
| var ret = fn.apply(this, args) |
| var cb = args[args.length-1] |
| if (typeof ret === 'function' && ret !== cb) { |
| Object.keys(cb).forEach(function (k) { |
| ret[k] = cb[k] |
| }) |
| } |
| return ret |
| } |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 16: |
| /***/ (function(module) { |
| |
| module.exports = require("tls"); |
| |
| /***/ }), |
| |
| /***/ 18: |
| /***/ (function(module) { |
| |
| module.exports = eval("require")("encoding"); |
| |
| |
| /***/ }), |
| |
| /***/ 19: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticationPlugin; |
| |
| const { Deprecation } = __webpack_require__(692); |
| const once = __webpack_require__(969); |
| |
| const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); |
| |
| const authenticate = __webpack_require__(674); |
| const beforeRequest = __webpack_require__(471); |
| const requestError = __webpack_require__(349); |
| |
| function authenticationPlugin(octokit, options) { |
| if (options.auth) { |
| octokit.authenticate = () => { |
| deprecateAuthenticate( |
| octokit.log, |
| new Deprecation( |
| '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor' |
| ) |
| ); |
| }; |
| return; |
| } |
| const state = { |
| octokit, |
| auth: false |
| }; |
| octokit.authenticate = authenticate.bind(null, state); |
| octokit.hook.before("request", beforeRequest.bind(null, state)); |
| octokit.hook.error("request", requestError.bind(null, state)); |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 20: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const cp = __webpack_require__(129); |
| const parse = __webpack_require__(568); |
| const enoent = __webpack_require__(881); |
| |
| function spawn(command, args, options) { |
| // Parse the arguments |
| const parsed = parse(command, args, options); |
| |
| // Spawn the child process |
| const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); |
| |
| // Hook into child process "exit" event to emit an error if the command |
| // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 |
| enoent.hookChildProcess(spawned, parsed); |
| |
| return spawned; |
| } |
| |
| function spawnSync(command, args, options) { |
| // Parse the arguments |
| const parsed = parse(command, args, options); |
| |
| // Spawn the child process |
| const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); |
| |
| // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 |
| result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); |
| |
| return result; |
| } |
| |
| module.exports = spawn; |
| module.exports.spawn = spawn; |
| module.exports.sync = spawnSync; |
| |
| module.exports._parse = parse; |
| module.exports._enoent = enoent; |
| |
| |
| /***/ }), |
| |
| /***/ 23: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| // Standard YAML's JSON schema. |
| // http://www.yaml.org/spec/1.2/spec.html#id2803231 |
| // |
| // NOTE: JS-YAML does not support schema-specific tag resolution restrictions. |
| // So, this schema is not such strict as defined in the YAML specification. |
| // It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. |
| |
| |
| |
| |
| |
| var Schema = __webpack_require__(43); |
| |
| |
| module.exports = new Schema({ |
| include: [ |
| __webpack_require__(581) |
| ], |
| implicit: [ |
| __webpack_require__(809), |
| __webpack_require__(228), |
| __webpack_require__(44), |
| __webpack_require__(417) |
| ] |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 39: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| module.exports = opts => { |
| opts = opts || {}; |
| |
| const env = opts.env || process.env; |
| const platform = opts.platform || process.platform; |
| |
| if (platform !== 'win32') { |
| return 'PATH'; |
| } |
| |
| return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 43: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| /*eslint-disable max-len*/ |
| |
| var common = __webpack_require__(740); |
| var YAMLException = __webpack_require__(556); |
| var Type = __webpack_require__(945); |
| |
| |
| function compileList(schema, name, result) { |
| var exclude = []; |
| |
| schema.include.forEach(function (includedSchema) { |
| result = compileList(includedSchema, name, result); |
| }); |
| |
| schema[name].forEach(function (currentType) { |
| result.forEach(function (previousType, previousIndex) { |
| if (previousType.tag === currentType.tag && previousType.kind === currentType.kind) { |
| exclude.push(previousIndex); |
| } |
| }); |
| |
| result.push(currentType); |
| }); |
| |
| return result.filter(function (type, index) { |
| return exclude.indexOf(index) === -1; |
| }); |
| } |
| |
| |
| function compileMap(/* lists... */) { |
| var result = { |
| scalar: {}, |
| sequence: {}, |
| mapping: {}, |
| fallback: {} |
| }, index, length; |
| |
| function collectType(type) { |
| result[type.kind][type.tag] = result['fallback'][type.tag] = type; |
| } |
| |
| for (index = 0, length = arguments.length; index < length; index += 1) { |
| arguments[index].forEach(collectType); |
| } |
| return result; |
| } |
| |
| |
| function Schema(definition) { |
| this.include = definition.include || []; |
| this.implicit = definition.implicit || []; |
| this.explicit = definition.explicit || []; |
| |
| this.implicit.forEach(function (type) { |
| if (type.loadKind && type.loadKind !== 'scalar') { |
| throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); |
| } |
| }); |
| |
| this.compiledImplicit = compileList(this, 'implicit', []); |
| this.compiledExplicit = compileList(this, 'explicit', []); |
| this.compiledTypeMap = compileMap(this.compiledImplicit, this.compiledExplicit); |
| } |
| |
| |
| Schema.DEFAULT = null; |
| |
| |
| Schema.create = function createSchema() { |
| var schemas, types; |
| |
| switch (arguments.length) { |
| case 1: |
| schemas = Schema.DEFAULT; |
| types = arguments[0]; |
| break; |
| |
| case 2: |
| schemas = arguments[0]; |
| types = arguments[1]; |
| break; |
| |
| default: |
| throw new YAMLException('Wrong number of arguments for Schema.create function'); |
| } |
| |
| schemas = common.toArray(schemas); |
| types = common.toArray(types); |
| |
| if (!schemas.every(function (schema) { return schema instanceof Schema; })) { |
| throw new YAMLException('Specified list of super schemas (or a single Schema object) contains a non-Schema object.'); |
| } |
| |
| if (!types.every(function (type) { return type instanceof Type; })) { |
| throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); |
| } |
| |
| return new Schema({ |
| include: schemas, |
| explicit: types |
| }); |
| }; |
| |
| |
| module.exports = Schema; |
| |
| |
| /***/ }), |
| |
| /***/ 44: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var common = __webpack_require__(740); |
| var Type = __webpack_require__(945); |
| |
| function isHexCode(c) { |
| return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || |
| ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || |
| ((0x61/* a */ <= c) && (c <= 0x66/* f */)); |
| } |
| |
| function isOctCode(c) { |
| return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); |
| } |
| |
| function isDecCode(c) { |
| return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); |
| } |
| |
| function resolveYamlInteger(data) { |
| if (data === null) return false; |
| |
| var max = data.length, |
| index = 0, |
| hasDigits = false, |
| ch; |
| |
| if (!max) return false; |
| |
| ch = data[index]; |
| |
| // sign |
| if (ch === '-' || ch === '+') { |
| ch = data[++index]; |
| } |
| |
| if (ch === '0') { |
| // 0 |
| if (index + 1 === max) return true; |
| ch = data[++index]; |
| |
| // base 2, base 8, base 16 |
| |
| if (ch === 'b') { |
| // base 2 |
| index++; |
| |
| for (; index < max; index++) { |
| ch = data[index]; |
| if (ch === '_') continue; |
| if (ch !== '0' && ch !== '1') return false; |
| hasDigits = true; |
| } |
| return hasDigits && ch !== '_'; |
| } |
| |
| |
| if (ch === 'x') { |
| // base 16 |
| index++; |
| |
| for (; index < max; index++) { |
| ch = data[index]; |
| if (ch === '_') continue; |
| if (!isHexCode(data.charCodeAt(index))) return false; |
| hasDigits = true; |
| } |
| return hasDigits && ch !== '_'; |
| } |
| |
| // base 8 |
| for (; index < max; index++) { |
| ch = data[index]; |
| if (ch === '_') continue; |
| if (!isOctCode(data.charCodeAt(index))) return false; |
| hasDigits = true; |
| } |
| return hasDigits && ch !== '_'; |
| } |
| |
| // base 10 (except 0) or base 60 |
| |
| // value should not start with `_`; |
| if (ch === '_') return false; |
| |
| for (; index < max; index++) { |
| ch = data[index]; |
| if (ch === '_') continue; |
| if (ch === ':') break; |
| if (!isDecCode(data.charCodeAt(index))) { |
| return false; |
| } |
| hasDigits = true; |
| } |
| |
| // Should have digits and should not end with `_` |
| if (!hasDigits || ch === '_') return false; |
| |
| // if !base60 - done; |
| if (ch !== ':') return true; |
| |
| // base60 almost not used, no needs to optimize |
| return /^(:[0-5]?[0-9])+$/.test(data.slice(index)); |
| } |
| |
| function constructYamlInteger(data) { |
| var value = data, sign = 1, ch, base, digits = []; |
| |
| if (value.indexOf('_') !== -1) { |
| value = value.replace(/_/g, ''); |
| } |
| |
| ch = value[0]; |
| |
| if (ch === '-' || ch === '+') { |
| if (ch === '-') sign = -1; |
| value = value.slice(1); |
| ch = value[0]; |
| } |
| |
| if (value === '0') return 0; |
| |
| if (ch === '0') { |
| if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); |
| if (value[1] === 'x') return sign * parseInt(value, 16); |
| return sign * parseInt(value, 8); |
| } |
| |
| if (value.indexOf(':') !== -1) { |
| value.split(':').forEach(function (v) { |
| digits.unshift(parseInt(v, 10)); |
| }); |
| |
| value = 0; |
| base = 1; |
| |
| digits.forEach(function (d) { |
| value += (d * base); |
| base *= 60; |
| }); |
| |
| return sign * value; |
| |
| } |
| |
| return sign * parseInt(value, 10); |
| } |
| |
| function isInteger(object) { |
| return (Object.prototype.toString.call(object)) === '[object Number]' && |
| (object % 1 === 0 && !common.isNegativeZero(object)); |
| } |
| |
| module.exports = new Type('tag:yaml.org,2002:int', { |
| kind: 'scalar', |
| resolve: resolveYamlInteger, |
| construct: constructYamlInteger, |
| predicate: isInteger, |
| represent: { |
| binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, |
| octal: function (obj) { return obj >= 0 ? '0' + obj.toString(8) : '-0' + obj.toString(8).slice(1); }, |
| decimal: function (obj) { return obj.toString(10); }, |
| /* eslint-disable max-len */ |
| hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } |
| }, |
| defaultStyle: 'decimal', |
| styleAliases: { |
| binary: [ 2, 'bin' ], |
| octal: [ 8, 'oct' ], |
| decimal: [ 10, 'dec' ], |
| hexadecimal: [ 16, 'hex' ] |
| } |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 47: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = factory; |
| |
| const Octokit = __webpack_require__(402); |
| const registerPlugin = __webpack_require__(855); |
| |
| function factory(plugins) { |
| const Api = Octokit.bind(null, plugins || []); |
| Api.plugin = registerPlugin.bind(null, plugins || []); |
| return Api; |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 48: |
| /***/ (function(module, exports) { |
| |
exports = module.exports = SemVer

// Debug logging is enabled only when NODE_DEBUG contains "semver";
// otherwise debug() is a no-op so hot paths pay nothing.
var debug
/* istanbul ignore next */
if (typeof process === 'object' &&
    process.env &&
    process.env.NODE_DEBUG &&
    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments, 0)
    args.unshift('SEMVER')
    console.log.apply(console, args)
  }
} else {
  debug = function () {}
}

// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'

// Hard cap on accepted version-string length (guards the regexes below
// against pathological inputs).
var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re
// `src` holds the regex SOURCE strings and `re` the compiled RegExp
// objects; the arrays are parallel, indexed by the `R` counter. Source
// strings are kept so larger patterns can be composed from smaller ones.
var re = exports.re = []
var src = exports.src = []
var R = 0

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

var NUMERICIDENTIFIER = R++
src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
var NUMERICIDENTIFIERLOOSE = R++
src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

var NONNUMERICIDENTIFIER = R++
src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.

var MAINVERSION = R++
src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')'

var MAINVERSIONLOOSE = R++
src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

var PRERELEASEIDENTIFIER = R++
src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
                            '|' + src[NONNUMERICIDENTIFIER] + ')'

var PRERELEASEIDENTIFIERLOOSE = R++
src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
                                 '|' + src[NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

var PRERELEASE = R++
src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
                  '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'

// Loose form: the leading hyphen is optional (e.g. "1.0.0alpha1").
var PRERELEASELOOSE = R++
src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
                       '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

var BUILDIDENTIFIER = R++
src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

var BUILD = R++
src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
             '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

var FULL = R++
var FULLPLAIN = 'v?' + src[MAINVERSION] +
                src[PRERELEASE] + '?' +
                src[BUILD] + '?'

src[FULL] = '^' + FULLPLAIN + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
                 src[PRERELEASELOOSE] + '?' +
                 src[BUILD] + '?'

var LOOSE = R++
src[LOOSE] = '^' + LOOSEPLAIN + '$'

// Comparator operator: optional < or >, each optionally followed by =.
var GTLT = R++
src[GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
var XRANGEIDENTIFIERLOOSE = R++
src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
var XRANGEIDENTIFIER = R++
src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'

var XRANGEPLAIN = R++
src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:' + src[PRERELEASE] + ')?' +
                   src[BUILD] + '?' +
                   ')?)?'

var XRANGEPLAINLOOSE = R++
src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:' + src[PRERELEASELOOSE] + ')?' +
                        src[BUILD] + '?' +
                        ')?)?'

var XRANGE = R++
src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
var XRANGELOOSE = R++
src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
var COERCE = R++
src[COERCE] = '(?:^|[^\\d])' +
              '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:$|[^\\d])'

// Tilde ranges.
// Meaning is "reasonably at or greater than"
var LONETILDE = R++
src[LONETILDE] = '(?:~>?)'

// Collapses whitespace between a tilde and its version ("~ 1.2" => "~1.2").
// Compiled eagerly here because it needs the /g flag.
var TILDETRIM = R++
src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

var TILDE = R++
src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
var TILDELOOSE = R++
src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
var LONECARET = R++
src[LONECARET] = '(?:\\^)'

// Collapses whitespace between a caret and its version ("^ 1.2" => "^1.2").
var CARETTRIM = R++
src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
var caretTrimReplace = '$1^'

var CARET = R++
src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
var CARETLOOSE = R++
src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
var COMPARATORLOOSE = R++
src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
var COMPARATOR = R++
src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
var COMPARATORTRIM = R++
src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
                      '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
var HYPHENRANGE = R++
src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
                   '\\s+-\\s+' +
                   '(' + src[XRANGEPLAIN] + ')' +
                   '\\s*$'

var HYPHENRANGELOOSE = R++
src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s+-\\s+' +
                        '(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s*$'

// Star ranges basically just allow anything at all.
var STAR = R++
src[STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
// (The trim regexes above were compiled eagerly with /g and are skipped.)
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
| |
exports.parse = parse
// Parse `version` into a SemVer instance, or return null when it is not
// a valid semver string under the given options.
function parse (version, options) {
  // Normalize the legacy boolean `loose` argument into an options object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  // Already parsed: hand it straight back.
  if (version instanceof SemVer) {
    return version
  }

  // Non-strings and oversized strings are never valid versions.
  if (typeof version !== 'string' || version.length > MAX_LENGTH) {
    return null
  }

  var pattern = options.loose ? re[LOOSE] : re[FULL]
  if (!pattern.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
| |
exports.valid = valid
// Return the normalized version string, or null if `version` is invalid.
function valid (version, options) {
  var parsed = parse(version, options)
  return parsed ? parsed.version : null
}

exports.clean = clean
// Like valid(), but first strips surrounding whitespace and any leading
// '=' / 'v' characters.
function clean (version, options) {
  var stripped = version.trim().replace(/^[=v]+/, '')
  var parsed = parse(stripped, options)
  return parsed ? parsed.version : null
}
| |
exports.SemVer = SemVer

/**
 * Parsed semantic version.
 *
 * `version` may be a string or an existing SemVer instance; `options` may
 * be an options object ({ loose, includePrerelease }) or the legacy
 * boolean `loose` flag. Throws TypeError on invalid input.
 */
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  // Reuse an existing instance only when its looseness matches; otherwise
  // reparse its string form under the requested options.
  // NOTE(review): only `loose` is compared here, not `includePrerelease` —
  // matches upstream semver v5 behavior.
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      return version
    } else {
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  // Support calling without `new`.
  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      // Purely numeric identifiers compare numerically, so store them as
      // numbers when they fit safely.
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  // Build metadata is kept but never participates in comparisons.
  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
| |
// Rebuild `this.version` from major/minor/patch (plus prerelease ids)
// and return it. Build metadata is intentionally excluded.
SemVer.prototype.format = function () {
  var rendered = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length > 0) {
    rendered += '-' + this.prerelease.join('.')
  }
  this.version = rendered
  return rendered
}

SemVer.prototype.toString = function () {
  return this.version
}

// Full comparison: main version first, prerelease as tiebreaker.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }
  return this.compareMain(other) || this.comparePre(other)
}
| |
// Compare only major.minor.patch, ignoring prerelease identifiers.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  var result = compareIdentifiers(this.major, other.major)
  if (result) {
    return result
  }
  result = compareIdentifiers(this.minor, other.minor)
  if (result) {
    return result
  }
  return compareIdentifiers(this.patch, other.patch)
}

// Compare only the prerelease identifier lists.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  if (this.prerelease.length && !other.prerelease.length) {
    return -1
  }
  if (!this.prerelease.length && other.prerelease.length) {
    return 1
  }
  if (!this.prerelease.length && !other.prerelease.length) {
    return 0
  }

  // Walk identifier-by-identifier; a list that runs out first loses.
  for (var i = 0; ; i++) {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    }
    if (b === undefined) {
      return 1
    }
    if (a === undefined) {
      return -1
    }
    if (a !== b) {
      return compareIdentifiers(a, b)
    }
  }
}
| |
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
/**
 * Increment this version in place.
 *
 * `release` is one of 'major' | 'minor' | 'patch' | 'premajor' |
 * 'preminor' | 'prepatch' | 'prerelease' | 'pre'; `identifier` optionally
 * names the prerelease tag (e.g. 'beta'). Returns `this`; throws Error on
 * an unknown release type. The pre* cases recurse into simpler cases.
 */
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        // Bump the right-most numeric identifier; i = -2 marks success,
        // i = -1 after the loop means no numeric identifier was found.
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            i = -2
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  // Re-render the version string and keep `raw` in sync with it.
  this.format()
  this.raw = this.version
  return this
}
| |
exports.inc = inc
// Bump `version` by `release` type, returning the new version string or
// null on invalid input. The third argument may be either the legacy
// `loose` flag or the prerelease identifier (inc(v, release, identifier)).
function inc (version, release, loose, identifier) {
  if (typeof loose === 'string') {
    // Called as inc(version, release, identifier).
    identifier = loose
    loose = undefined
  }

  try {
    return new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    return null
  }
}
| |
exports.diff = diff
// Describe the most significant difference between two versions as
// 'major' | 'minor' | 'patch', prefixed with 'pre' when either version
// has a prerelease tag; 'prerelease' when only the prerelease differs;
// null when the versions are equal; undefined when no category applies.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  }

  var v1 = parse(version1)
  var v2 = parse(version2)
  var prefix = ''
  var defaultResult
  if (v1.prerelease.length || v2.prerelease.length) {
    prefix = 'pre'
    defaultResult = 'prerelease'
  }
  if (v1.major !== v2.major) {
    return prefix + 'major'
  }
  if (v1.minor !== v2.minor) {
    return prefix + 'minor'
  }
  if (v1.patch !== v2.patch) {
    return prefix + 'patch'
  }
  return defaultResult // may be undefined
}

exports.compareIdentifiers = compareIdentifiers
| |
var numeric = /^[0-9]+$/
// SemVer identifier comparison: numeric identifiers compare numerically
// and always sort below alphanumeric ones; otherwise compare as strings.
function compareIdentifiers (a, b) {
  var aIsNum = numeric.test(a)
  var bIsNum = numeric.test(b)

  if (aIsNum && bIsNum) {
    a = +a
    b = +b
  }

  if (a === b) {
    return 0
  }
  if (aIsNum && !bIsNum) {
    return -1
  }
  if (bIsNum && !aIsNum) {
    return 1
  }
  return a < b ? -1 : 1
}
| |
exports.rcompareIdentifiers = rcompareIdentifiers
// Identifier comparison with reversed ordering (for descending sorts).
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}

exports.major = major
// Extract the numeric major component of a version.
function major (a, loose) {
  var parsed = new SemVer(a, loose)
  return parsed.major
}

exports.minor = minor
// Extract the numeric minor component of a version.
function minor (a, loose) {
  var parsed = new SemVer(a, loose)
  return parsed.minor
}

exports.patch = patch
// Extract the numeric patch component of a version.
function patch (a, loose) {
  var parsed = new SemVer(a, loose)
  return parsed.patch
}
| |
exports.compare = compare
// Standard three-way compare of two versions: -1, 0, or 1.
function compare (a, b, loose) {
  var va = new SemVer(a, loose)
  var vb = new SemVer(b, loose)
  return va.compare(vb)
}

exports.compareLoose = compareLoose
// compare() with loose parsing always enabled.
function compareLoose (a, b) {
  return compare(a, b, true)
}

exports.rcompare = rcompare
// Reversed three-way compare (for descending order).
function rcompare (a, b, loose) {
  return compare(b, a, loose)
}

exports.sort = sort
// Sort a list of versions ascending (in place, like Array#sort).
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compare(a, b, loose)
  })
}

exports.rsort = rsort
// Sort a list of versions descending (in place).
function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.rcompare(a, b, loose)
  })
}

// Boolean comparison predicates, all thin wrappers over compare().
exports.gt = gt
function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}

exports.lt = lt
function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}

exports.eq = eq
function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}

exports.neq = neq
function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}

exports.gte = gte
function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}

exports.lte = lte
function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}

exports.cmp = cmp
// Comparison dispatcher: apply the comparator operator `op` to a and b.
// '===' / '!==' are strict identity on the raw version strings (SemVer
// instances are unwrapped first); the remaining operators use semver
// ordering. Throws TypeError on an unrecognized operator.
function cmp (a, op, b, loose) {
  if (op === '===') {
    if (typeof a === 'object') {
      a = a.version
    }
    if (typeof b === 'object') {
      b = b.version
    }
    return a === b
  }
  if (op === '!==') {
    if (typeof a === 'object') {
      a = a.version
    }
    if (typeof b === 'object') {
      b = b.version
    }
    return a !== b
  }
  if (op === '' || op === '=' || op === '==') {
    return eq(a, b, loose)
  }
  if (op === '!=') {
    return neq(a, b, loose)
  }
  if (op === '>') {
    return gt(a, b, loose)
  }
  if (op === '>=') {
    return gte(a, b, loose)
  }
  if (op === '<') {
    return lt(a, b, loose)
  }
  if (op === '<=') {
    return lte(a, b, loose)
  }
  throw new TypeError('Invalid operator: ' + op)
}
| |
exports.Comparator = Comparator
// A single comparator such as '>=1.2.3'. `comp` may be a string or an
// existing Comparator (which is reused when its looseness matches).
function Comparator (comp, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    if (comp.loose === !!options.loose) {
      return comp
    }
    // Looseness differs: reparse from the string form.
    comp = comp.value
  }

  // Support calling without `new`.
  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  this.parse(comp)

  // The match-anything comparator stringifies to ''.
  this.value = this.semver === ANY ? '' : this.operator + this.semver.version

  debug('comp', this)
}
| |
// Sentinel object for a comparator that matches every version.
var ANY = {}

// Parse a comparator string into `this.operator` and `this.semver`.
// Throws TypeError when the string is not a valid comparator.
Comparator.prototype.parse = function (comp) {
  var pattern = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var m = comp.match(pattern)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  // m[1] is the operator; a bare '=' is normalized to ''.
  this.operator = m[1] === '=' ? '' : m[1]

  // No version part (just '>' or the empty string) means match-anything.
  this.semver = m[2] ? new SemVer(m[2], this.options.loose) : ANY
}
| |
Comparator.prototype.toString = function () {
  return this.value
}

// Does `version` satisfy this single comparator?
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  if (this.semver === ANY) {
    return true
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  return cmp(version, this.operator, this.semver, this.options)
}
| |
// Determine whether this comparator's allowed region of the version
// space overlaps with `comp`'s (i.e. some version could satisfy both).
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An empty operator means plain equality: the question reduces to
  // "does that exact version satisfy the other comparator as a range?".
  if (this.operator === '') {
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  // Otherwise enumerate the overlap cases between two inequalities.
  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
    (sameSemVer && differentDirectionsInclusive) ||
    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
| |
exports.Range = Range
// A range is a disjunction ('||') of comparator sets; each set is a
// space-separated conjunction of comparators.
function Range (range, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (range instanceof Range) {
    // Reuse only when both option flags agree; otherwise reparse the raw
    // source text under the requested options.
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      return range
    }
    return new Range(range.raw, options)
  }

  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  // Support calling without `new`.
  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  var self = this
  this.set = range.split(/\s*\|\|\s*/).map(function (part) {
    return self.parseRange(part.trim())
  }).filter(function (comparators) {
    // throw out any that are not relevant for whatever reason
    return comparators.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
| |
// Canonical string form: comparator sets joined with '||'.
Range.prototype.format = function () {
  var alternatives = []
  for (var i = 0; i < this.set.length; i++) {
    alternatives.push(this.set[i].join(' ').trim())
  }
  this.range = alternatives.join('||').trim()
  return this.range
}

Range.prototype.toString = function () {
  return this.range
}
| |
// Turn one '||'-free range chunk into an array of Comparator objects.
// The replace passes below must run in this exact order.
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()

  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)

  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // Desugar every comparator (carets, tildes, x-ranges, stars) into
  // primitive comparators, then re-split on whitespace.
  var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var self = this
  var set = range.split(' ').map(function (comp) {
    return parseComparator(comp, self.options)
  }).join(' ').split(/\s+/)

  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }

  return set.map(function (comp) {
    return new Comparator(comp, self.options)
  })
}
| |
// True when this range and `range` can both be satisfied by some version:
// there must exist one of our comparator sets in which EVERY comparator
// finds SOME set of `range` whose comparators ALL intersect it.
// NOTE: the some/every nesting order is significant — do not reorder.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  return this.set.some(function (thisComparators) {
    return thisComparators.every(function (thisComparator) {
      return range.set.some(function (rangeComparators) {
        return rangeComparators.every(function (rangeComparator) {
          return thisComparator.intersects(rangeComparator, options)
        })
      })
    })
  })
}
| |
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
// Flatten a range into arrays of comparator strings, one array per '||'
// alternative.
function toComparators (range, options) {
  var parsed = new Range(range, options)
  return parsed.set.map(function (comparators) {
    var joined = comparators.map(function (c) {
      return c.value
    }).join(' ').trim()
    return joined.split(' ')
  })
}
| |
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Desugaring pipeline; each stage rewrites `comp` in place.
  var stages = [replaceCarets, replaceTildes, replaceXRanges, replaceStars]
  var labels = ['caret', 'tildes', 'xrange', 'stars']
  for (var i = 0; i < stages.length; i++) {
    comp = stages[i](comp, options)
    debug(labels[i], comp)
  }
  return comp
}
| |
// True when a version part is a wildcard: missing/empty, 'x'/'X', or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  return id.toLowerCase() === 'x' || id === '*'
}
| |
| // ~, ~> --> * (any, kinda silly) |
| // ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 |
| // ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 |
| // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 |
| // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 |
| // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 |
| function replaceTildes (comp, options) { |
| return comp.trim().split(/\s+/).map(function (comp) { |
| return replaceTilde(comp, options) |
| }).join(' ') |
| } |
| |
// Desugar a single tilde comparator into primitive >=/< comparators.
// See the table above replaceTildes for the mapping.
function replaceTilde (comp, options) {
  var r = options.loose ? re[TILDELOOSE] : re[TILDE]
  // M/m/p/pr are the captured major, minor, patch, and prerelease parts.
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // ~* / ~ with no version: matches anything.
      ret = ''
    } else if (isX(m)) {
      // ~1 == >=1.0.0 <2.0.0
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    } else if (pr) {
      debug('replaceTilde pr', pr)
      // ~1.2.3-pre == >=1.2.3-pre <1.3.0
      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + (+m + 1) + '.0'
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + (+m + 1) + '.0'
    }

    debug('tilde return', ret)
    return ret
  })
}
| |
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
  var parts = comp.trim().split(/\s+/)
  return parts.map(function (c) {
    return replaceCaret(c, options)
  }).join(' ')
}
| |
// Desugar a single caret comparator into primitive >=/< comparators.
// Caret pins the left-most NON-ZERO component, so 0.x and 0.0.x versions
// get progressively narrower upper bounds.
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var r = options.loose ? re[CARETLOOSE] : re[CARET]
  // M/m/p/pr are the captured major, minor, patch, and prerelease parts.
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // ^* / ^ with no version: matches anything.
      ret = ''
    } else if (isX(m)) {
      // ^1 == >=1.0.0 <2.0.0
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      if (M === '0') {
        // ^0.2 == >=0.2.0 <0.3.0 (minor is the pinned component)
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
      } else {
        // ^1.2 == >=1.2.0 <2.0.0
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3-pre == >=0.0.3-pre <0.0.4
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
                ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          // ^0.2.3-pre == >=0.2.3-pre <0.3.0
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
                ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        // ^1.2.3-pre == >=1.2.3-pre <2.0.0
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
              ' <' + (+M + 1) + '.0.0'
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3 == >=0.0.3 <0.0.4
          ret = '>=' + M + '.' + m + '.' + p +
                ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          // ^0.2.3 == >=0.2.3 <0.3.0
          ret = '>=' + M + '.' + m + '.' + p +
                ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        // ^1.2.3 == >=1.2.3 <2.0.0
        ret = '>=' + M + '.' + m + '.' + p +
              ' <' + (+M + 1) + '.0.0'
      }
    }

    debug('caret return', ret)
    return ret
  })
}
| |
// Apply replaceXRange to each whitespace-separated piece of `comp`.
// (Unlike the tilde/caret variants this deliberately does not trim first.)
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  var parts = comp.split(/\s+/)
  return parts.map(function (c) {
    return replaceXRange(c, options)
  }).join(' ')
}
| |
// Desugar an x-range comparator (e.g. '>=1.2.x', '1.x', '*') into
// primitive comparators. The captured groups are mutated in place and
// reassembled at the end.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    // Which components are wildcards; each implies the ones to its right.
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // '=1.x' behaves the same as '1.x'.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p
    } else if (xm) {
      // 1.x => >=1.0.0 <2.0.0
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (xp) {
      // 1.2.x => >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    }

    debug('xRange return', ret)

    return ret
  })
}
| |
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here: star is always as loose as it gets!
  var trimmed = comp.trim()
  return trimmed.replace(re[STAR], '')
}
| |
// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: wildcard components widen to the start of their release line.
  var lower
  if (isX(fM)) {
    lower = ''
  } else if (isX(fm)) {
    lower = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    lower = '>=' + fM + '.' + fm + '.0'
  } else {
    lower = '>=' + from
  }

  // Upper bound: wildcards become an exclusive '<' on the next version up.
  var upper
  if (isX(tM)) {
    upper = ''
  } else if (isX(tm)) {
    upper = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    upper = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    upper = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    upper = '<=' + to
  }

  return (lower + ' ' + upper).trim()
}
| |
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
| |
// Does `version` satisfy EVERY comparator in `set`? Also enforces the
// prerelease policy: a prerelease version only matches when some
// comparator in the set explicitly mentions a prerelease of the same
// [major, minor, patch] tuple (unless options.includePrerelease is set).
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === ANY) {
        continue
      }

      if (set[i].semver.prerelease.length > 0) {
        var allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
| |
exports.satisfies = satisfies
// True when `version` is inside `range`; an unparseable range yields false
// rather than throwing.
function satisfies (version, range, options) {
  var r
  try {
    r = new Range(range, options)
  } catch (er) {
    return false
  }
  return r.test(version)
}
| |
exports.maxSatisfying = maxSatisfying
// Returns the highest entry of `versions` that satisfies `range`
// (in its original form), or null if none do or the range is invalid.
function maxSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  var max = null    // best candidate, as passed in
  var maxSV = null  // parsed form of `max`, for comparisons
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    if (!max || maxSV.compare(v) === -1) {
      // previous best was lower than v
      max = v
      maxSV = new SemVer(max, options)
    }
  })
  return max
}
| |
exports.minSatisfying = minSatisfying
// Returns the lowest entry of `versions` that satisfies `range`
// (in its original form), or null if none do or the range is invalid.
function minSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  var min = null    // best candidate, as passed in
  var minSV = null  // parsed form of `min`, for comparisons
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    if (!min || minSV.compare(v) === 1) {
      // previous best was higher than v
      min = v
      minSV = new SemVer(min, options)
    }
  })
  return min
}
| |
exports.minVersion = minVersion
// Returns the lowest version that can possibly match the given range,
// or null when the range excludes everything.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast paths: the absolute floor, with and without a prerelease tag.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  // Otherwise take the smallest lower bound across all comparator sets.
  minver = null
  range.set.forEach(function (comparators) {
    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // '>x.y.z' admits the next patch (or the first prerelease after
          // a prerelease bound), so bump before treating it like '>='.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  })

  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
| |
exports.validRange = validRange
// Returns the normalized range string, or null when the range is invalid.
function validRange (range, options) {
  try {
    var parsed = new Range(range, options)
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    return parsed.range || '*'
  } catch (er) {
    return null
  }
}
| |
// Determine if version is less than all the versions possible in the range.
exports.ltr = ltr
function ltr (version, range, options) {
  // Delegates to outside() with the "low side" flag.
  return outside(version, range, '<', options)
}
| |
// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr
function gtr (version, range, options) {
  // Delegates to outside() with the "high side" flag.
  return outside(version, range, '>', options)
}
| |
exports.outside = outside
// Returns true when `version` falls entirely outside `range` on the side
// named by `hilo`: '>' asks "is it above every version the range allows?"
// and '<' asks "is it below every one?". Throws TypeError for other values.
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // Select comparison helpers so the logic below can be written once, as
  // if always in "gtr" mode; for '<' each function/operator is mirrored.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisfies the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    // Highest and lowest comparator of this alternative set.
    var high = null
    var low = null

    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        // A bare '*' is treated as '>=0.0.0' for boundary purposes.
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has an operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
| |
exports.prerelease = prerelease
// Returns the prerelease components of `version` (e.g. ['beta', 1]), or
// null when the version is invalid or has no prerelease tag.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (parsed && parsed.prerelease.length) {
    return parsed.prerelease
  }
  return null
}
| |
exports.intersects = intersects
// True when the two ranges share at least one version.
function intersects (r1, r2, options) {
  return new Range(r1, options).intersects(new Range(r2, options))
}
| |
exports.coerce = coerce
// Coerces a string containing something version-like into a SemVer,
// padding missing minor/patch components with zeros. Returns null for
// non-strings and strings with no version-ish content.
function coerce (version) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  var match = version.match(re[COERCE])
  if (!match) {
    return null
  }

  var major = match[1]
  var minor = match[2] || '0'
  var patch = match[3] || '0'
  return parse(major + '.' + minor + '.' + patch)
}
| |
| |
| /***/ }), |
| |
| /***/ 49: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const os = __webpack_require__(87); |
| const execa = __webpack_require__(955); |
| |
// Reference: https://www.gaijin.at/en/lstwinver.php
// Maps the `major.minor` NT kernel release to the marketing name of the
// corresponding desktop Windows version. Ambiguous kernel versions shared
// with server editions are disambiguated at runtime (see below).
const names = new Map([
	['10.0', '10'],
	['6.3', '8.1'],
	['6.2', '8'],
	['6.1', '7'],
	['6.0', 'Vista'],
	['5.2', 'Server 2003'],
	['5.1', 'XP'],
	['5.0', '2000'],
	['4.9', 'ME'],
	['4.1', '98'],
	['4.0', '95']
]);
| |
/**
 * Resolve a human-readable Windows release name (e.g. '10', 'XP',
 * 'Server 2019') from an `n.n` kernel release string. Falls back to
 * `os.release()` when no argument is given; throws if an explicit
 * `release` argument does not contain an `n.n` version.
 */
const windowsRelease = release => {
	const match = /\d+\.\d/.exec(release || os.release());

	if (release && !match) {
		throw new Error('`release` argument doesn\'t match `n.n`');
	}

	const ver = match ? match[0] : undefined;

	// Kernel versions 6.1/6.2/6.3/10.0 are shared between desktop and server
	// editions, so when inspecting the local machine, query the OS caption —
	// PowerShell first, falling back to `wmic` (obsolete on later Windows 10
	// builds): https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
	// A caption year of 2008/2012/2016/2019 marks a server edition.
	if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
		let stdout;
		try {
			stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
		} catch (_) {
			stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
		}

		const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];

		if (year) {
			return `Server ${year}`;
		}
	}

	return names.get(ver);
};
| |
| module.exports = windowsRelease; |
| |
| |
| /***/ }), |
| |
| /***/ 82: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// YAML 1.1 `!!timestamp` forms: a bare date, or a full timestamp with
// optional fractional seconds and timezone offset.
var YAML_DATE_REGEXP = new RegExp(
  '^([0-9][0-9][0-9][0-9])'          + // [1] year
  '-([0-9][0-9])'                    + // [2] month
  '-([0-9][0-9])$');                   // [3] day

var YAML_TIMESTAMP_REGEXP = new RegExp(
  '^([0-9][0-9][0-9][0-9])'          + // [1] year
  '-([0-9][0-9]?)'                   + // [2] month
  '-([0-9][0-9]?)'                   + // [3] day
  '(?:[Tt]|[ \\t]+)'                 + // date/time separator
  '([0-9][0-9]?)'                    + // [4] hour
  ':([0-9][0-9])'                    + // [5] minute
  ':([0-9][0-9])'                    + // [6] second
  '(?:\\.([0-9]*))?'                 + // [7] fraction
  '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour
  '(?::([0-9][0-9]))?))?$');           // [11] tz_minute
| |
// A scalar resolves as a timestamp when it matches either the plain
// date form (YYYY-MM-DD) or the full timestamp form.
function resolveYamlTimestamp(data) {
  if (data === null) return false;
  return YAML_DATE_REGEXP.exec(data) !== null ||
         YAML_TIMESTAMP_REGEXP.exec(data) !== null;
}
| |
// Builds a JS Date from a YAML timestamp scalar. Dates without a time
// part become UTC midnight; an explicit timezone offset is folded into
// the resulting UTC instant.
function constructYamlTimestamp(data) {
  var match, year, month, day, hour, minute, second, fraction = 0,
      delta = null, tz_hour, tz_minute, date;

  match = YAML_DATE_REGEXP.exec(data);
  if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data);

  if (match === null) throw new Error('Date resolve error');

  // match: [1] year [2] month [3] day

  year = +(match[1]);
  month = +(match[2]) - 1; // JS month starts with 0
  day = +(match[3]);

  if (!match[4]) { // no hour
    return new Date(Date.UTC(year, month, day));
  }

  // match: [4] hour [5] minute [6] second [7] fraction

  hour = +(match[4]);
  minute = +(match[5]);
  second = +(match[6]);

  if (match[7]) {
    // Keep at most millisecond precision, right-padding so '.5' => 500ms.
    fraction = match[7].slice(0, 3);
    while (fraction.length < 3) { // milli-seconds
      fraction += '0';
    }
    fraction = +fraction;
  }

  // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute

  if (match[9]) {
    tz_hour = +(match[10]);
    tz_minute = +(match[11] || 0);
    delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds
    if (match[9] === '-') delta = -delta;
  }

  date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));

  // Shift by the offset so the stored instant is true UTC.
  if (delta) date.setTime(date.getTime() - delta);

  return date;
}
| |
// Dumps a Date in canonical ISO 8601 form (always UTC).
function representYamlTimestamp(object /*, style*/) {
  return object.toISOString();
}
| |
// The js-yaml `!!timestamp` type: recognizes date/timestamp scalars,
// constructs `Date` instances, and dumps them via `toISOString()`.
module.exports = new Type('tag:yaml.org,2002:timestamp', {
  kind: 'scalar',
  resolve: resolveYamlTimestamp,
  construct: constructYamlTimestamp,
  instanceOf: Date,
  represent: representYamlTimestamp
});
| |
| |
| /***/ }), |
| |
| /***/ 87: |
| /***/ (function(module) { |
| |
// Bundled stub: re-exports Node's built-in `os` module under module id 87.
module.exports = require("os");
| |
| /***/ }), |
| |
| /***/ 93: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| |
| var common = __webpack_require__(740); |
| |
| |
/**
 * A position inside a YAML source text, used to render error locations.
 *
 * @param {string} name - display name of the input (e.g. a filename).
 * @param {string} buffer - the full text being parsed.
 * @param {number} position - absolute character offset of the mark.
 * @param {number} line - zero-based line number.
 * @param {number} column - zero-based column number.
 */
function Mark(name, buffer, position, line, column) {
  this.name = name;
  this.buffer = buffer;
  this.position = position;
  this.line = line;
  this.column = column;
}
| |
| |
// Renders the line around the marked position, clipped to `maxLength`
// characters and followed by a caret line pointing at the position.
// Returns null when no source buffer is available.
Mark.prototype.getSnippet = function getSnippet(indent, maxLength) {
  if (!this.buffer) return null;

  indent = indent || 4;
  maxLength = maxLength || 75;

  var lineBreaks = '\x00\r\n\x85\u2028\u2029';
  var budget = maxLength / 2 - 1;

  // Walk left toward the start of the line, inserting an ellipsis and
  // backing off once half the allowed width is consumed.
  var head = '';
  var start = this.position;
  while (start > 0 && lineBreaks.indexOf(this.buffer.charAt(start - 1)) === -1) {
    start -= 1;
    if (this.position - start > budget) {
      head = ' ... ';
      start += 5;
      break;
    }
  }

  // Walk right toward the end of the line the same way.
  var tail = '';
  var end = this.position;
  while (end < this.buffer.length && lineBreaks.indexOf(this.buffer.charAt(end)) === -1) {
    end += 1;
    if (end - this.position > budget) {
      tail = ' ... ';
      end -= 5;
      break;
    }
  }

  var snippet = this.buffer.slice(start, end);
  return common.repeat(' ', indent) + head + snippet + tail + '\n' +
         common.repeat(' ', indent + this.position - start + head.length) + '^';
};
| |
| |
// Formats the mark as e.g. 'in "file.yml" at line 3, column 7', appending
// a source snippet unless `compact` is requested.
Mark.prototype.toString = function toString(compact) {
  var where = '';

  if (this.name) {
    where += 'in "' + this.name + '" ';
  }

  // Stored line/column are zero-based; display them one-based.
  where += 'at line ' + (this.line + 1) + ', column ' + (this.column + 1);

  if (compact) {
    return where;
  }

  var snippet = this.getSnippet();
  if (snippet) {
    where += ':\n' + snippet;
  }

  return where;
};
| |
| |
| module.exports = Mark; |
| |
| |
| /***/ }), |
| |
| /***/ 100: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
var _hasOwnProperty = Object.prototype.hasOwnProperty;

// A YAML `!!set` is a mapping in which every value is null.
function resolveYamlSet(data) {
  if (data === null) return true;

  for (var key in data) {
    if (_hasOwnProperty.call(data, key) && data[key] !== null) {
      return false;
    }
  }

  return true;
}
| |
// An empty set (null node) is normalized to an empty object.
function constructYamlSet(data) {
  if (data === null) return {};
  return data;
}
| |
// The js-yaml `!!set` type: a mapping whose values are all null.
module.exports = new Type('tag:yaml.org,2002:set', {
  kind: 'mapping',
  resolve: resolveYamlSet,
  construct: constructYamlSet
});
| |
| |
| /***/ }), |
| |
| /***/ 118: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const os = __webpack_require__(87); |
| |
// Maps the Darwin kernel major version to the macOS (formerly OS X)
// marketing name; for these entries the macOS version is `10.(darwin - 4)`.
const nameMap = new Map([
	[19, 'Catalina'],
	[18, 'Mojave'],
	[17, 'High Sierra'],
	[16, 'Sierra'],
	[15, 'El Capitan'],
	[14, 'Yosemite'],
	[13, 'Mavericks'],
	[12, 'Mountain Lion'],
	[11, 'Lion'],
	[10, 'Snow Leopard'],
	[9, 'Leopard'],
	[8, 'Tiger'],
	[7, 'Panther'],
	[6, 'Jaguar'],
	[5, 'Puma']
]);
| |
/**
 * Map a Darwin kernel release string (e.g. '18.2.0') to the macOS
 * marketing name and version. Defaults to the running system's release.
 *
 * Fix: Darwin 20+ (macOS Big Sur and later) abandoned the 10.x scheme —
 * there the macOS version is `darwin - 9`. The old code returned e.g.
 * `{ name: undefined, version: '10.16' }` for Big Sur. Output for all
 * Darwin releases <= 19 is unchanged.
 *
 * @param {string} [release] Darwin kernel release; defaults to `os.release()`.
 * @returns {{name: (string|undefined), version: string}}
 */
const macosRelease = release => {
	release = Number((release || os.release()).split('.')[0]);

	if (release >= 20) {
		// macOS 11+: one major version per Darwin major, offset by 9.
		const modernNames = new Map([
			[20, 'Big Sur'],
			[21, 'Monterey'],
			[22, 'Ventura'],
			[23, 'Sonoma'],
			[24, 'Sequoia']
		]);
		return {
			name: modernNames.get(release),
			version: String(release - 9)
		};
	}

	return {
		name: nameMap.get(release),
		version: '10.' + (release - 4)
	};
};
| |
| module.exports = macosRelease; |
| // TODO: remove this in the next major version |
| module.exports.default = macosRelease; |
| |
| |
| /***/ }), |
| |
| /***/ 126: |
| /***/ (function(module) { |
| |
| /** |
| * lodash (Custom Build) <https://lodash.com/> |
| * Build: `lodash modularize exports="npm" -o ./` |
| * Copyright jQuery Foundation and other contributors <https://jquery.org/> |
| * Released under MIT license <https://lodash.com/license> |
| * Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE> |
| * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors |
| */ |
| |
/** Used as the size to enable large array optimizations (Set-based dedupe). */
var LARGE_ARRAY_SIZE = 200;

/**
 * Used to stand-in for `undefined` hash values, so a stored `undefined`
 * can be told apart from a missing key.
 */
var HASH_UNDEFINED = '__lodash_hash_undefined__';

/** Used as references for various `Number` constants. */
var INFINITY = 1 / 0;

/** `Object#toString` result references. */
var funcTag = '[object Function]',
    genTag = '[object GeneratorFunction]';

/**
 * Used to match `RegExp`
 * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
 */
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;

/** Used to detect host constructors (Safari). */
var reIsHostCtor = /^\[object .+?Constructor\]$/;

/** Detect free variable `global` from Node.js. */
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;

/** Detect free variable `self` (browsers / web workers). */
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;

/** Used as a reference to the global object. */
var root = freeGlobal || freeSelf || Function('return this')();
| |
| /** |
| * A specialized version of `_.includes` for arrays without support for |
| * specifying an index to search from. |
| * |
| * @private |
| * @param {Array} [array] The array to inspect. |
| * @param {*} target The value to search for. |
| * @returns {boolean} Returns `true` if `target` is found, else `false`. |
| */ |
| function arrayIncludes(array, value) { |
| var length = array ? array.length : 0; |
| return !!length && baseIndexOf(array, value, 0) > -1; |
| } |
| |
| /** |
| * This function is like `arrayIncludes` except that it accepts a comparator. |
| * |
| * @private |
| * @param {Array} [array] The array to inspect. |
| * @param {*} target The value to search for. |
| * @param {Function} comparator The comparator invoked per element. |
| * @returns {boolean} Returns `true` if `target` is found, else `false`. |
| */ |
| function arrayIncludesWith(array, value, comparator) { |
| var index = -1, |
| length = array ? array.length : 0; |
| |
| while (++index < length) { |
| if (comparator(value, array[index])) { |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| /** |
| * The base implementation of `_.findIndex` and `_.findLastIndex` without |
| * support for iteratee shorthands. |
| * |
| * @private |
| * @param {Array} array The array to inspect. |
| * @param {Function} predicate The function invoked per iteration. |
| * @param {number} fromIndex The index to search from. |
| * @param {boolean} [fromRight] Specify iterating from right to left. |
| * @returns {number} Returns the index of the matched value, else `-1`. |
| */ |
| function baseFindIndex(array, predicate, fromIndex, fromRight) { |
| var length = array.length, |
| index = fromIndex + (fromRight ? 1 : -1); |
| |
| while ((fromRight ? index-- : ++index < length)) { |
| if (predicate(array[index], index, array)) { |
| return index; |
| } |
| } |
| return -1; |
| } |
| |
| /** |
| * The base implementation of `_.indexOf` without `fromIndex` bounds checks. |
| * |
| * @private |
| * @param {Array} array The array to inspect. |
| * @param {*} value The value to search for. |
| * @param {number} fromIndex The index to search from. |
| * @returns {number} Returns the index of the matched value, else `-1`. |
| */ |
| function baseIndexOf(array, value, fromIndex) { |
| if (value !== value) { |
| return baseFindIndex(array, baseIsNaN, fromIndex); |
| } |
| var index = fromIndex - 1, |
| length = array.length; |
| |
| while (++index < length) { |
| if (array[index] === value) { |
| return index; |
| } |
| } |
| return -1; |
| } |
| |
| /** |
| * The base implementation of `_.isNaN` without support for number objects. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. |
| */ |
| function baseIsNaN(value) { |
| return value !== value; |
| } |
| |
| /** |
| * Checks if a cache value for `key` exists. |
| * |
| * @private |
| * @param {Object} cache The cache to query. |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function cacheHas(cache, key) { |
| return cache.has(key); |
| } |
| |
| /** |
| * Gets the value at `key` of `object`. |
| * |
| * @private |
| * @param {Object} [object] The object to query. |
| * @param {string} key The key of the property to get. |
| * @returns {*} Returns the property value. |
| */ |
| function getValue(object, key) { |
| return object == null ? undefined : object[key]; |
| } |
| |
| /** |
| * Checks if `value` is a host object in IE < 9. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a host object, else `false`. |
| */ |
| function isHostObject(value) { |
| // Many host objects are `Object` objects that can coerce to strings |
| // despite having improperly defined `toString` methods. |
| var result = false; |
| if (value != null && typeof value.toString != 'function') { |
| try { |
| result = !!(value + ''); |
| } catch (e) {} |
| } |
| return result; |
| } |
| |
| /** |
| * Converts `set` to an array of its values. |
| * |
| * @private |
| * @param {Object} set The set to convert. |
| * @returns {Array} Returns the values. |
| */ |
| function setToArray(set) { |
| var index = -1, |
| result = Array(set.size); |
| |
| set.forEach(function(value) { |
| result[++index] = value; |
| }); |
| return result; |
| } |
| |
/** Used for built-in method references. */
var arrayProto = Array.prototype,
    funcProto = Function.prototype,
    objectProto = Object.prototype;

/** Used to detect overreaching core-js shims. */
var coreJsData = root['__core-js_shared__'];

/** Used to detect methods masquerading as native (core-js wrappers). */
var maskSrcKey = (function() {
  var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
  return uid ? ('Symbol(src)_1.' + uid) : '';
}());

/** Used to resolve the decompiled source of functions. */
var funcToString = funcProto.toString;

/** Used to check objects for own properties. */
var hasOwnProperty = objectProto.hasOwnProperty;

/**
 * Used to resolve the
 * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
 * of values.
 */
var objectToString = objectProto.toString;

/**
 * Used to detect if a method is native: built from the decompiled source
 * of `hasOwnProperty`, with the identifier parts replaced by wildcards.
 */
var reIsNative = RegExp('^' +
  funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
  .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
);

/** Built-in value references. */
var splice = arrayProto.splice;

/* Built-in method references that are verified to be native. */
var Map = getNative(root, 'Map'),
    Set = getNative(root, 'Set'),
    nativeCreate = getNative(Object, 'create');
| |
| /** |
| * Creates a hash object. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function Hash(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the hash. |
| * |
| * @private |
| * @name clear |
| * @memberOf Hash |
| */ |
| function hashClear() { |
| this.__data__ = nativeCreate ? nativeCreate(null) : {}; |
| } |
| |
| /** |
| * Removes `key` and its value from the hash. |
| * |
| * @private |
| * @name delete |
| * @memberOf Hash |
| * @param {Object} hash The hash to modify. |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function hashDelete(key) { |
| return this.has(key) && delete this.__data__[key]; |
| } |
| |
| /** |
| * Gets the hash value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf Hash |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function hashGet(key) { |
| var data = this.__data__; |
| if (nativeCreate) { |
| var result = data[key]; |
| return result === HASH_UNDEFINED ? undefined : result; |
| } |
| return hasOwnProperty.call(data, key) ? data[key] : undefined; |
| } |
| |
| /** |
| * Checks if a hash value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf Hash |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function hashHas(key) { |
| var data = this.__data__; |
| return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); |
| } |
| |
| /** |
| * Sets the hash `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf Hash |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the hash instance. |
| */ |
| function hashSet(key, value) { |
| var data = this.__data__; |
| data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; |
| return this; |
| } |
| |
| // Add methods to `Hash`. |
| Hash.prototype.clear = hashClear; |
| Hash.prototype['delete'] = hashDelete; |
| Hash.prototype.get = hashGet; |
| Hash.prototype.has = hashHas; |
| Hash.prototype.set = hashSet; |
| |
| /** |
| * Creates an list cache object. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function ListCache(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the list cache. |
| * |
| * @private |
| * @name clear |
| * @memberOf ListCache |
| */ |
| function listCacheClear() { |
| this.__data__ = []; |
| } |
| |
| /** |
| * Removes `key` and its value from the list cache. |
| * |
| * @private |
| * @name delete |
| * @memberOf ListCache |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function listCacheDelete(key) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| if (index < 0) { |
| return false; |
| } |
| var lastIndex = data.length - 1; |
| if (index == lastIndex) { |
| data.pop(); |
| } else { |
| splice.call(data, index, 1); |
| } |
| return true; |
| } |
| |
| /** |
| * Gets the list cache value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf ListCache |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function listCacheGet(key) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| return index < 0 ? undefined : data[index][1]; |
| } |
| |
| /** |
| * Checks if a list cache value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf ListCache |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function listCacheHas(key) { |
| return assocIndexOf(this.__data__, key) > -1; |
| } |
| |
| /** |
| * Sets the list cache `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf ListCache |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the list cache instance. |
| */ |
| function listCacheSet(key, value) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| if (index < 0) { |
| data.push([key, value]); |
| } else { |
| data[index][1] = value; |
| } |
| return this; |
| } |
| |
| // Add methods to `ListCache`. |
| ListCache.prototype.clear = listCacheClear; |
| ListCache.prototype['delete'] = listCacheDelete; |
| ListCache.prototype.get = listCacheGet; |
| ListCache.prototype.has = listCacheHas; |
| ListCache.prototype.set = listCacheSet; |
| |
| /** |
| * Creates a map cache object to store key-value pairs. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function MapCache(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the map. |
| * |
| * @private |
| * @name clear |
| * @memberOf MapCache |
| */ |
| function mapCacheClear() { |
| this.__data__ = { |
| 'hash': new Hash, |
| 'map': new (Map || ListCache), |
| 'string': new Hash |
| }; |
| } |
| |
| /** |
| * Removes `key` and its value from the map. |
| * |
| * @private |
| * @name delete |
| * @memberOf MapCache |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function mapCacheDelete(key) { |
| return getMapData(this, key)['delete'](key); |
| } |
| |
| /** |
| * Gets the map value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf MapCache |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function mapCacheGet(key) { |
| return getMapData(this, key).get(key); |
| } |
| |
| /** |
| * Checks if a map value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf MapCache |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function mapCacheHas(key) { |
| return getMapData(this, key).has(key); |
| } |
| |
| /** |
| * Sets the map `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf MapCache |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the map cache instance. |
| */ |
| function mapCacheSet(key, value) { |
| getMapData(this, key).set(key, value); |
| return this; |
| } |
| |
| // Add methods to `MapCache`. |
| MapCache.prototype.clear = mapCacheClear; |
| MapCache.prototype['delete'] = mapCacheDelete; |
| MapCache.prototype.get = mapCacheGet; |
| MapCache.prototype.has = mapCacheHas; |
| MapCache.prototype.set = mapCacheSet; |
| |
| /** |
| * |
| * Creates an array cache object to store unique values. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [values] The values to cache. |
| */ |
| function SetCache(values) { |
| var index = -1, |
| length = values ? values.length : 0; |
| |
| this.__data__ = new MapCache; |
| while (++index < length) { |
| this.add(values[index]); |
| } |
| } |
| |
| /** |
| * Adds `value` to the array cache. |
| * |
| * @private |
| * @name add |
| * @memberOf SetCache |
| * @alias push |
| * @param {*} value The value to cache. |
| * @returns {Object} Returns the cache instance. |
| */ |
| function setCacheAdd(value) { |
| this.__data__.set(value, HASH_UNDEFINED); |
| return this; |
| } |
| |
| /** |
| * Checks if `value` is in the array cache. |
| * |
| * @private |
| * @name has |
| * @memberOf SetCache |
| * @param {*} value The value to search for. |
| * @returns {number} Returns `true` if `value` is found, else `false`. |
| */ |
| function setCacheHas(value) { |
| return this.__data__.has(value); |
| } |
| |
| // Add methods to `SetCache`. |
| SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; |
| SetCache.prototype.has = setCacheHas; |
| |
| /** |
| * Gets the index at which the `key` is found in `array` of key-value pairs. |
| * |
| * @private |
| * @param {Array} array The array to inspect. |
| * @param {*} key The key to search for. |
| * @returns {number} Returns the index of the matched value, else `-1`. |
| */ |
| function assocIndexOf(array, key) { |
| var length = array.length; |
| while (length--) { |
| if (eq(array[length][0], key)) { |
| return length; |
| } |
| } |
| return -1; |
| } |
| |
| /** |
| * The base implementation of `_.isNative` without bad shim checks. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a native function, |
| * else `false`. |
| */ |
| function baseIsNative(value) { |
| if (!isObject(value) || isMasked(value)) { |
| return false; |
| } |
| var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; |
| return pattern.test(toSource(value)); |
| } |
| |
| /** |
| * The base implementation of `_.uniqBy` without support for iteratee shorthands. |
| * |
| * @private |
| * @param {Array} array The array to inspect. |
| * @param {Function} [iteratee] The iteratee invoked per element. |
| * @param {Function} [comparator] The comparator invoked per element. |
| * @returns {Array} Returns the new duplicate free array. |
| */ |
| function baseUniq(array, iteratee, comparator) { |
| var index = -1, |
| includes = arrayIncludes, |
| length = array.length, |
| isCommon = true, |
| result = [], |
| seen = result; |
| |
| if (comparator) { |
| isCommon = false; |
| includes = arrayIncludesWith; |
| } |
| else if (length >= LARGE_ARRAY_SIZE) { |
| var set = iteratee ? null : createSet(array); |
| if (set) { |
| return setToArray(set); |
| } |
| isCommon = false; |
| includes = cacheHas; |
| seen = new SetCache; |
| } |
| else { |
| seen = iteratee ? [] : result; |
| } |
| outer: |
| while (++index < length) { |
| var value = array[index], |
| computed = iteratee ? iteratee(value) : value; |
| |
| value = (comparator || value !== 0) ? value : 0; |
| if (isCommon && computed === computed) { |
| var seenIndex = seen.length; |
| while (seenIndex--) { |
| if (seen[seenIndex] === computed) { |
| continue outer; |
| } |
| } |
| if (iteratee) { |
| seen.push(computed); |
| } |
| result.push(value); |
| } |
| else if (!includes(seen, computed, comparator)) { |
| if (seen !== result) { |
| seen.push(computed); |
| } |
| result.push(value); |
| } |
| } |
| return result; |
| } |
| |
| /** |
| * Creates a set object of `values`. |
| * |
| * @private |
| * @param {Array} values The values to add to the set. |
| * @returns {Object} Returns the new set. |
| */ |
| var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { |
| return new Set(values); |
| }; |
| |
| /** |
| * Gets the data for `map`. |
| * |
| * @private |
| * @param {Object} map The map to query. |
| * @param {string} key The reference key. |
| * @returns {*} Returns the map data. |
| */ |
| function getMapData(map, key) { |
| var data = map.__data__; |
| return isKeyable(key) |
| ? data[typeof key == 'string' ? 'string' : 'hash'] |
| : data.map; |
| } |
| |
| /** |
| * Gets the native function at `key` of `object`. |
| * |
| * @private |
| * @param {Object} object The object to query. |
| * @param {string} key The key of the method to get. |
| * @returns {*} Returns the function if it's native, else `undefined`. |
| */ |
| function getNative(object, key) { |
| var value = getValue(object, key); |
| return baseIsNative(value) ? value : undefined; |
| } |
| |
| /** |
| * Checks if `value` is suitable for use as unique object key. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is suitable, else `false`. |
| */ |
| function isKeyable(value) { |
| var type = typeof value; |
| return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') |
| ? (value !== '__proto__') |
| : (value === null); |
| } |
| |
| /** |
| * Checks if `func` has its source masked. |
| * |
| * @private |
| * @param {Function} func The function to check. |
| * @returns {boolean} Returns `true` if `func` is masked, else `false`. |
| */ |
| function isMasked(func) { |
| return !!maskSrcKey && (maskSrcKey in func); |
| } |
| |
| /** |
| * Converts `func` to its source code. |
| * |
| * @private |
| * @param {Function} func The function to process. |
| * @returns {string} Returns the source code. |
| */ |
| function toSource(func) { |
| if (func != null) { |
| try { |
| return funcToString.call(func); |
| } catch (e) {} |
| try { |
| return (func + ''); |
| } catch (e) {} |
| } |
| return ''; |
| } |
| |
| /** |
| * Creates a duplicate-free version of an array, using |
| * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) |
| * for equality comparisons, in which only the first occurrence of each |
| * element is kept. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Array |
| * @param {Array} array The array to inspect. |
| * @returns {Array} Returns the new duplicate free array. |
| * @example |
| * |
| * _.uniq([2, 1, 2]); |
| * // => [2, 1] |
| */ |
| function uniq(array) { |
| return (array && array.length) |
| ? baseUniq(array) |
| : []; |
| } |
| |
| /** |
| * Performs a |
| * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) |
| * comparison between two values to determine if they are equivalent. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to compare. |
| * @param {*} other The other value to compare. |
| * @returns {boolean} Returns `true` if the values are equivalent, else `false`. |
| * @example |
| * |
| * var object = { 'a': 1 }; |
| * var other = { 'a': 1 }; |
| * |
| * _.eq(object, object); |
| * // => true |
| * |
| * _.eq(object, other); |
| * // => false |
| * |
| * _.eq('a', 'a'); |
| * // => true |
| * |
| * _.eq('a', Object('a')); |
| * // => false |
| * |
| * _.eq(NaN, NaN); |
| * // => true |
| */ |
| function eq(value, other) { |
| return value === other || (value !== value && other !== other); |
| } |
| |
| /** |
| * Checks if `value` is classified as a `Function` object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a function, else `false`. |
| * @example |
| * |
| * _.isFunction(_); |
| * // => true |
| * |
| * _.isFunction(/abc/); |
| * // => false |
| */ |
| function isFunction(value) { |
| // The use of `Object#toString` avoids issues with the `typeof` operator |
| // in Safari 8-9 which returns 'object' for typed array and other constructors. |
| var tag = isObject(value) ? objectToString.call(value) : ''; |
| return tag == funcTag || tag == genTag; |
| } |
| |
| /** |
| * Checks if `value` is the |
| * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) |
| * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is an object, else `false`. |
| * @example |
| * |
| * _.isObject({}); |
| * // => true |
| * |
| * _.isObject([1, 2, 3]); |
| * // => true |
| * |
| * _.isObject(_.noop); |
| * // => true |
| * |
| * _.isObject(null); |
| * // => false |
| */ |
| function isObject(value) { |
| var type = typeof value; |
| return !!value && (type == 'object' || type == 'function'); |
| } |
| |
| /** |
| * This method returns `undefined`. |
| * |
| * @static |
| * @memberOf _ |
| * @since 2.3.0 |
| * @category Util |
| * @example |
| * |
| * _.times(2, _.noop); |
| * // => [undefined, undefined] |
| */ |
| function noop() { |
| // No operation performed. |
| } |
| |
| module.exports = uniq; |
| |
| |
| /***/ }), |
| |
| /***/ 129: |
| /***/ (function(module) { |
| |
| module.exports = require("child_process"); |
| |
| /***/ }), |
| |
| /***/ 136: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript interop helper (tsc-emitted): re-exports binding `k` of
// module `m` on `o` under the name `k2` (defaults to `k`), as a live
// getter where `Object.create` is available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript interop helper: attaches `v` as the `default` export of `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript interop helper: wraps a CommonJS module so `import * as ns`
// sees its own enumerable properties plus a `default` binding.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// TypeScript interop helper: ensures a module value has a `default` binding.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGitSha = exports.getShortName = exports.getCurrentRef = exports.listAllFilesAsAdded = exports.parseGitDiffOutput = exports.getChangesSinceMergeBase = exports.getChangesOnHead = exports.getChanges = exports.getChangesInLastCommit = exports.HEAD = exports.NULL_SHA = void 0;
const exec_1 = __importDefault(__webpack_require__(807));
const core = __importStar(__webpack_require__(470));
const file_1 = __webpack_require__(258);
// The all-zero SHA git uses to denote a non-existent object.
exports.NULL_SHA = '0000000000000000000000000000000000000000';
exports.HEAD = 'HEAD';
// Detects the files touched by the most recent commit on HEAD.
async function getChangesInLastCommit() {
    core.startGroup(`Change detection in last commit`);
    let stdout = '';
    try {
        const result = await exec_1.default('git', ['log', '--format=', '--no-renames', '--name-status', '-z', '-n', '1']);
        stdout = result.stdout;
    }
    finally {
        fixStdOutNullTermination();
        core.endGroup();
    }
    return parseGitDiffOutput(stdout);
}
exports.getChangesInLastCommit = getChangesInLastCommit;
// Detects changes between `baseRef` and HEAD, fetching `baseRef` from
// origin first when it is not available locally.
async function getChanges(baseRef) {
    const available = await hasCommit(baseRef);
    if (!available) {
        // Fetch single commit
        core.startGroup(`Fetching ${baseRef} from origin`);
        await exec_1.default('git', ['fetch', '--depth=1', '--no-tags', 'origin', baseRef]);
        core.endGroup();
    }
    // Get differences between ref and HEAD
    core.startGroup(`Change detection ${baseRef}..HEAD`);
    let stdout = '';
    try {
        // Two dots '..' change detection - directly compares two versions
        const result = await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', `${baseRef}..HEAD`]);
        stdout = result.stdout;
    }
    finally {
        fixStdOutNullTermination();
        core.endGroup();
    }
    return parseGitDiffOutput(stdout);
}
exports.getChanges = getChanges;
// Detects uncommitted changes (staged and unstaged) relative to HEAD.
async function getChangesOnHead() {
    // Get current changes - both staged and unstaged
    core.startGroup(`Change detection on HEAD`);
    let stdout = '';
    try {
        const result = await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', 'HEAD']);
        stdout = result.stdout;
    }
    finally {
        fixStdOutNullTermination();
        core.endGroup();
    }
    return parseGitDiffOutput(stdout);
}
exports.getChangesOnHead = getChangesOnHead;
// Finds the merge base of `remotes/origin/<base>` and `ref`, progressively
// deepening the fetch until one is found (or full history is fetched),
// then diffs `ref` against that merge base ('...' three-dot diff).
// Falls back to listing all files as added when no merge base exists.
async function getChangesSinceMergeBase(base, ref, initialFetchDepth) {
    const baseRef = `remotes/origin/${base}`;
    async function hasMergeBase() {
        return (await exec_1.default('git', ['merge-base', baseRef, ref], { ignoreReturnCode: true })).code === 0;
    }
    let noMergeBase = false;
    core.startGroup(`Searching for merge-base ${baseRef}...${ref}`);
    try {
        if (!(await hasMergeBase())) {
            await exec_1.default('git', ['fetch', `--depth=${initialFetchDepth}`, 'origin', base, ref]);
            let depth = initialFetchDepth;
            let lastCommitCount = await getCommitCount();
            // Double the depth each round; stop once no new commits arrive,
            // making one final full-history fetch before giving up.
            while (!(await hasMergeBase())) {
                depth = Math.min(depth * 2, Number.MAX_SAFE_INTEGER);
                await exec_1.default('git', ['fetch', `--deepen=${depth}`, 'origin', base, ref]);
                const commitCount = await getCommitCount();
                if (commitCount === lastCommitCount) {
                    core.info('No more commits were fetched');
                    core.info('Last attempt will be to fetch full history');
                    await exec_1.default('git', ['fetch']);
                    if (!(await hasMergeBase())) {
                        noMergeBase = true;
                    }
                    break;
                }
                lastCommitCount = commitCount;
            }
        }
    }
    finally {
        core.endGroup();
    }
    if (noMergeBase) {
        core.warning('No merge base found - all files will be listed as added');
        return await listAllFilesAsAdded();
    }
    // Get changes introduced on HEAD compared to ref
    core.startGroup(`Change detection ${baseRef}...${ref}`);
    let output = '';
    try {
        // Three dots '...' change detection - finds merge-base and compares against it
        output = (await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', `${baseRef}...${ref}`])).stdout;
    }
    finally {
        fixStdOutNullTermination();
        core.endGroup();
    }
    return parseGitDiffOutput(output);
}
exports.getChangesSinceMergeBase = getChangesSinceMergeBase;
// Parses NUL-delimited `git diff --name-status -z` output into
// {status, filename} records (tokens come in status/filename pairs).
function parseGitDiffOutput(output) {
    const tokens = output.split('\u0000').filter(token => token.length > 0);
    const files = [];
    let i = 0;
    while (i + 1 < tokens.length) {
        files.push({
            status: statusMap[tokens[i]],
            filename: tokens[i + 1]
        });
        i += 2;
    }
    return files;
}
exports.parseGitDiffOutput = parseGitDiffOutput;
// Lists every file tracked by git, reporting each one as "Added".
async function listAllFilesAsAdded() {
    core.startGroup('Listing all files tracked by git');
    let stdout = '';
    try {
        const result = await exec_1.default('git', ['ls-files', '-z']);
        stdout = result.stdout;
    }
    finally {
        fixStdOutNullTermination();
        core.endGroup();
    }
    const paths = stdout.split('\u0000').filter(s => s.length > 0);
    return paths.map(path => ({
        status: file_1.ChangeStatus.Added,
        filename: path
    }));
}
exports.listAllFilesAsAdded = listAllFilesAsAdded;
// Resolves a name for the current checkout: the current branch, else an
// exact-match tag (detached HEAD), else the HEAD commit SHA.
async function getCurrentRef() {
    core.startGroup(`Determining current ref`);
    try {
        const branch = (await exec_1.default('git', ['branch', '--show-current'])).stdout.trim();
        if (branch) {
            return branch;
        }
        // Empty branch name means detached HEAD; try an exact tag match.
        const describe = await exec_1.default('git', ['describe', '--tags', '--exact-match'], { ignoreReturnCode: true });
        if (describe.code === 0) {
            return describe.stdout.trim();
        }
        return (await exec_1.default('git', ['rev-parse', exports.HEAD])).stdout.trim();
    }
    finally {
        core.endGroup();
    }
}
exports.getCurrentRef = getCurrentRef;
// Strips a `refs/heads/` or `refs/tags/` prefix from a fully-qualified ref.
function getShortName(ref) {
    if (!ref)
        return '';
    for (const prefix of ['refs/heads/', 'refs/tags/']) {
        if (ref.startsWith(prefix)) {
            return ref.slice(prefix.length);
        }
    }
    return ref;
}
| exports.getShortName = getShortName; |
// Returns true when `ref` is a full 40-character git object id.
// Git SHA-1 object names are lowercase hexadecimal, so the character
// class is [0-9a-f]; the previous pattern [a-z0-9] wrongly accepted g-z.
function isGitSha(ref) {
    return /^[0-9a-f]{40}$/.test(ref);
}
| exports.isGitSha = isGitSha; |
// Returns true when the commit object for `ref` exists locally.
async function hasCommit(ref) {
    core.startGroup(`Checking if commit for ${ref} is locally available`);
    try {
        const result = await exec_1.default('git', ['cat-file', '-e', `${ref}^{commit}`], { ignoreReturnCode: true });
        return result.code === 0;
    }
    finally {
        core.endGroup();
    }
}
// Counts all commits currently reachable in the repository; returns 0
// when the `git rev-list` output cannot be parsed as a number.
async function getCommitCount() {
    const output = (await exec_1.default('git', ['rev-list', '--count', '--all'])).stdout;
    // Always pass an explicit radix to parseInt.
    const count = parseInt(output, 10);
    return isNaN(count) ? 0 : count;
}
// Emits an empty info line so the next stdout write starts on a fresh line.
function fixStdOutNullTermination() {
    // Previous command uses NULL as delimiters and output is printed to stdout.
    // We have to make sure next thing written to stdout will start on new line.
    // Otherwise things like ::set-output wouldn't work.
    core.info('');
}
// Maps single-letter `git diff --name-status` codes to ChangeStatus values.
const statusMap = {
    A: file_1.ChangeStatus.Added,
    C: file_1.ChangeStatus.Copied,
    D: file_1.ChangeStatus.Deleted,
    M: file_1.ChangeStatus.Modified,
    R: file_1.ChangeStatus.Renamed,
    U: file_1.ChangeStatus.Unmerged
};
| |
| |
| /***/ }), |
| |
| /***/ 141: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var net = __webpack_require__(631); |
| var tls = __webpack_require__(16); |
| var http = __webpack_require__(605); |
| var https = __webpack_require__(211); |
| var events = __webpack_require__(614); |
| var assert = __webpack_require__(357); |
| var util = __webpack_require__(669); |
| |
| |
// One factory per request-protocol / proxy-protocol combination.
exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;
| |
| |
// Agent for HTTP requests tunneled through an HTTP proxy.
function httpOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  return tunnelAgent;
}
| |
// Agent for HTTPS requests tunneled through an HTTP proxy: the tunnel
// socket is upgraded to TLS and the default port becomes 443.
function httpsOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
| |
// Agent for HTTP requests tunneled through an HTTPS proxy.
function httpOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  return tunnelAgent;
}
| |
// Agent for HTTPS requests tunneled through an HTTPS proxy.
function httpsOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
| |
| |
// HTTP agent that opens tunnels through a proxy (via CONNECT requests).
// Emits/handles a 'free' event to recycle idle tunnel sockets.
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = []; // requests queued while at maxSockets
  self.sockets = [];  // sockets (or placeholders) currently in use

  // When a socket frees up, hand it to a queued request for the same
  // host:port; otherwise close it and drop it from the pool.
  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
| |
// Dispatches a request: below `maxSockets` a new tunnel socket is created
// for it; otherwise it is queued until a socket frees up.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    // Detach all listeners so a closed/removed socket cannot leak handlers.
    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
| |
// Issues a CONNECT request to the proxy and passes the established tunnel
// socket to `cb`. A placeholder occupies the pool slot until the tunnel is
// up (or has failed), so `maxSockets` accounting stays correct.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from() replaces the deprecated, unsafe `new Buffer()` ctor.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    // Any status other than 200 means the proxy refused the tunnel.
    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    // A successful CONNECT response must not carry a body.
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the real socket.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
| |
// Drops `socket` from the pool; if requests are queued, immediately opens
// a replacement tunnel for the next one.
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index === -1) {
    return;
  }
  this.sockets.splice(index, 1);

  var pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function(newSocket) {
      pending.request.onSocket(newSocket);
    });
  }
};
| |
// createSocket variant for https-over-proxy agents: wraps the raw tunnel
// socket in TLS. The SNI servername comes from the request's Host header
// with any ":port" suffix stripped, falling back to the target host.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // Replace the plain socket with the TLS socket in the pool.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
| |
| |
// Normalizes the legacy positional (host, port, localAddress) signature
// into an options object; objects pass through unchanged.
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // already an options object (v0.11 or later API)
  }
  // since v0.10: positional arguments
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
| |
// Shallow-merges the own enumerable keys of each source object into
// `target`, skipping `undefined` values; later sources win. Non-object
// sources are ignored.
function mergeOptions(target, ...sources) {
  for (const overrides of sources) {
    if (typeof overrides === 'object') {
      for (const key of Object.keys(overrides)) {
        if (overrides[key] !== undefined) {
          target[key] = overrides[key];
        }
      }
    }
  }
  return target;
}
| |
| |
// `debug` logs to stderr only when NODE_DEBUG contains the word "tunnel";
// otherwise it is a no-op.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Prefix string messages in place; otherwise prepend a tag argument.
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
| |
| |
| /***/ }), |
| |
| /***/ 143: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = withAuthorizationPrefix; |
| |
| const atob = __webpack_require__(368); |
| |
const REGEX_IS_BASIC_AUTH = /^[\w-]+:/;

// Prefixes a raw credential with "basic", "bearer" or "token" so it can be
// used in an Authorization header. Credentials that already carry one of
// those prefixes are returned unchanged.
function withAuthorizationPrefix(authorization) {
  if (/^(basic|bearer|token) /i.test(authorization)) {
    return authorization;
  }

  // Values that base64-decode to "user:password" get the "basic" prefix.
  try {
    if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) {
      return `basic ${authorization}`;
    }
  } catch (error) {}

  // Three dot-separated segments look like a JWT -> "bearer".
  const looksLikeJwt = authorization.split(/\./).length === 3;
  return looksLikeJwt ? `bearer ${authorization}` : `token ${authorization}`;
}
| |
| |
| /***/ }), |
| |
| /***/ 145: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const pump = __webpack_require__(453); |
| const bufferStream = __webpack_require__(966); |
| |
// Error thrown when a stream buffers more than `options.maxBuffer`.
class MaxBufferError extends Error {
	constructor() {
		super('maxBuffer exceeded');
		this.name = 'MaxBufferError';
	}
}
| |
// Buffers `inputStream` and resolves with the accumulated value.
// Rejects with `MaxBufferError` once more than `options.maxBuffer` has
// been buffered; any rejection carries the data read so far on
// `error.bufferedData`.
function getStream(inputStream, options) {
	if (!inputStream) {
		return Promise.reject(new Error('Expected a stream'));
	}

	options = Object.assign({maxBuffer: Infinity}, options);

	const {maxBuffer} = options;

	let stream;
	return new Promise((resolve, reject) => {
		const rejectPromise = error => {
			if (error) { // A null check
				// Expose whatever was buffered before the failure.
				error.bufferedData = stream.getBufferedValue();
			}
			reject(error);
		};

		stream = pump(inputStream, bufferStream(options), error => {
			if (error) {
				rejectPromise(error);
				return;
			}

			resolve();
		});

		// Enforce the maxBuffer limit as data arrives.
		stream.on('data', () => {
			if (stream.getBufferedLength() > maxBuffer) {
				rejectPromise(new MaxBufferError());
			}
		});
	}).then(() => stream.getBufferedValue());
}
| |
module.exports = getStream;
// Convenience variants: resolve with a Buffer, or with an array of chunks.
module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
module.exports.MaxBufferError = MaxBufferError;
| |
| |
| /***/ }), |
| |
| /***/ 148: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = paginatePlugin; |
| |
| const { paginateRest } = __webpack_require__(299); |
| |
// Octokit plugin: mixes the REST pagination helpers into the instance.
function paginatePlugin(octokit) {
  const paginationMethods = paginateRest(octokit);
  Object.assign(octokit, paginationMethods);
}
| |
| |
| /***/ }), |
| |
| /***/ 168: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| const alias = ['stdin', 'stdout', 'stderr']; |
| |
| const hasAlias = opts => alias.some(x => Boolean(opts[x])); |
| |
| module.exports = opts => { |
| if (!opts) { |
| return null; |
| } |
| |
| if (opts.stdio && hasAlias(opts)) { |
| throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); |
| } |
| |
| if (typeof opts.stdio === 'string') { |
| return opts.stdio; |
| } |
| |
| const stdio = opts.stdio || []; |
| |
| if (!Array.isArray(stdio)) { |
| throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); |
| } |
| |
| const result = []; |
| const len = Math.max(stdio.length, alias.length); |
| |
| for (let i = 0; i < len; i++) { |
| let value = null; |
| |
| if (stdio[i] !== undefined) { |
| value = stdio[i]; |
| } else if (opts[alias[i]] !== undefined) { |
| value = opts[alias[i]]; |
| } |
| |
| result[i] = value; |
| } |
| |
| return result; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 181: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
| var _hasOwnProperty = Object.prototype.hasOwnProperty; |
| var _toString = Object.prototype.toString; |
| |
// Validates a YAML !!omap node: a sequence of plain objects, each with
// exactly one own key, and no key repeated across the sequence.
function resolveYamlOmap(data) {
  if (data === null) return true;

  var seenKeys = [];

  for (var index = 0; index < data.length; index += 1) {
    var pair = data[index];
    var ownKeyCount = 0;
    var lastKey;

    if (_toString.call(pair) !== '[object Object]') return false;

    // Count own enumerable keys; more than one disqualifies the pair.
    for (lastKey in pair) {
      if (_hasOwnProperty.call(pair, lastKey)) {
        ownKeyCount += 1;
        if (ownKeyCount > 1) return false;
      }
    }

    if (ownKeyCount === 0) return false;

    // Reject duplicate keys across the whole sequence.
    if (seenKeys.indexOf(lastKey) !== -1) return false;
    seenKeys.push(lastKey);
  }

  return true;
}
| |
// A YAML !!omap node constructs to the sequence itself ([] when empty).
function constructYamlOmap(data) {
  if (data === null) {
    return [];
  }
  return data;
}
| |
// Registers the YAML 1.1 `!!omap` (ordered map) type, represented as a
// sequence of single-key mappings.
module.exports = new Type('tag:yaml.org,2002:omap', {
  kind: 'sequence',
  resolve: resolveYamlOmap,
  construct: constructYamlOmap
});
| |
| |
| /***/ }), |
| |
| /***/ 190: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticationPlugin; |
| |
| const { createTokenAuth } = __webpack_require__(813); |
| const { Deprecation } = __webpack_require__(692); |
| const once = __webpack_require__(969); |
| |
| const beforeRequest = __webpack_require__(863); |
| const requestError = __webpack_require__(293); |
| const validate = __webpack_require__(954); |
| const withAuthorizationPrefix = __webpack_require__(143); |
| |
// `once` ensures each deprecation is logged at most once per process.
const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation));
const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation));
| |
// Octokit plugin that wires up request authentication based on the
// `authStrategy` / `auth` constructor options.
function authenticationPlugin(octokit, options) {
  // An explicit strategy wins: instantiate it with `options.auth`.
  if (options.authStrategy) {
    const auth = options.authStrategy(options.auth);
    octokit.hook.wrap("request", auth.hook);
    octokit.auth = auth;
    return;
  }

  // No auth at all: requests go out unauthenticated, `octokit.auth()` is
  // a no-op and no request hook is registered.
  if (!options.auth) {
    octokit.auth = () =>
      Promise.resolve({
        type: "unauthenticated"
      });
    return;
  }

  const isBasicAuthString =
    typeof options.auth === "string" &&
    /^basic/.test(withAuthorizationPrefix(options.auth));

  // A plain token string selects the default token-auth strategy.
  if (typeof options.auth === "string" && !isBasicAuthString) {
    const auth = createTokenAuth(options.auth);
    octokit.hook.wrap("request", auth.hook);
    octokit.auth = auth;
    return;
  }

  // Anything else (Basic Auth string or auth object) is deprecated.
  let deprecationMethod;
  let deprecationMessage;
  if (isBasicAuthString) {
    deprecationMethod = deprecateAuthBasic;
    deprecationMessage =
      'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)';
  } else {
    deprecationMethod = deprecateAuthObject;
    deprecationMessage =
      'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. See (https://octokit.github.io/rest.js/#authentication)';
  }
  deprecationMethod(
    octokit.log,
    new Deprecation("[@octokit/rest] " + deprecationMessage)
  );

  octokit.auth = () =>
    Promise.resolve({
      type: "deprecated",
      message: deprecationMessage
    });

  validate(options.auth);

  const state = {
    octokit,
    auth: options.auth
  };

  octokit.hook.before("request", beforeRequest.bind(null, state));
  octokit.hook.error("request", requestError.bind(null, state));
}
| |
| |
| /***/ }), |
| |
| /***/ 197: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = isexe |
| isexe.sync = sync |
| |
| var fs = __webpack_require__(747) |
| |
// Async check: does `path` name an executable regular file?
// Calls back with (error, false) on stat failure.
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    if (er) {
      cb(er, false)
      return
    }
    cb(null, checkStat(stat, options))
  })
}
| |
// Sync variant of `isexe`; throws when `path` cannot be stat'ed.
function sync (path, options) {
  var stat = fs.statSync(path)
  return checkStat(stat, options)
}
| |
// Only regular files can be executable; directories etc. are rejected.
function checkStat (stat, options) {
  if (!stat.isFile()) {
    return false
  }
  return checkMode(stat, options)
}
| |
// Checks the permission bits of `stat` against the effective (or supplied)
// uid/gid: world-execute, group-execute with matching gid, owner-execute
// with matching uid, or any execute bit when running as root (uid 0).
function checkMode (stat, options) {
  var mode = stat.mode

  var myUid = options.uid !== undefined ?
    options.uid : process.getuid && process.getuid()
  var myGid = options.gid !== undefined ?
    options.gid : process.getgid && process.getgid()

  var ownerExec = parseInt('100', 8)
  var groupExec = parseInt('010', 8)
  var otherExec = parseInt('001', 8)
  var ownerOrGroup = ownerExec | groupExec

  return (mode & otherExec) ||
    (mode & groupExec) && stat.gid === myGid ||
    (mode & ownerExec) && stat.uid === myUid ||
    (mode & ownerOrGroup) && myUid === 0
}
| |
| |
| /***/ }), |
| |
| /***/ 198: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript interop helper (tsc-emitted): re-exports binding `k` of
// module `m` on `o` under the name `k2` (defaults to `k`), as a live
// getter where `Object.create` is available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript interop helper: attaches `v` as the `default` export of `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript interop helper: wraps a CommonJS module so `import * as ns`
// sees its own enumerable properties plus a `default` binding.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const fs = __importStar(__webpack_require__(747)); |
| const core = __importStar(__webpack_require__(470)); |
| const github = __importStar(__webpack_require__(469)); |
| const filter_1 = __webpack_require__(235); |
| const file_1 = __webpack_require__(258); |
| const git = __importStar(__webpack_require__(136)); |
| const shell_escape_1 = __webpack_require__(206); |
| const csv_escape_1 = __webpack_require__(410); |
// Action entry point: reads inputs, detects changed files with the chosen
// strategy, applies the YAML filters and exports results as action outputs.
// Any error is reported via core.setFailed rather than thrown.
async function run() {
    try {
        // Switch CWD first so relative paths (filters file, git commands)
        // resolve against the configured working directory.
        const workingDirectory = core.getInput('working-directory', { required: false });
        if (workingDirectory) {
            process.chdir(workingDirectory);
        }
        const token = core.getInput('token', { required: false });
        const base = core.getInput('base', { required: false });
        // 'filters' input is either inline YAML or a path to a YAML file.
        const filtersInput = core.getInput('filters', { required: true });
        const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput;
        const listFiles = core.getInput('list-files', { required: false }).toLowerCase() || 'none';
        // Parse explicitly as base-10 (parseInt without a radix can honor
        // exotic prefixes); fall back to 10 commits when unset or invalid.
        const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', { required: false }), 10) || 10;
        if (!isExportFormat(listFiles)) {
            core.setFailed(`Input parameter 'list-files' is set to invalid value '${listFiles}'`);
            return;
        }
        const filter = new filter_1.Filter(filtersYaml);
        const files = await getChangedFiles(token, base, initialFetchDepth);
        const results = filter.match(files);
        exportResults(results, listFiles);
    }
    catch (error) {
        core.setFailed(error.message);
    }
}
// Heuristic: a 'filters' input containing a newline or ':' is inline YAML;
// anything else is treated as a path to a configuration file.
function isPathInput(text) {
    const looksLikeYaml = text.includes('\n') || text.includes(':');
    return !looksLikeYaml;
}
// Reads the filter configuration file as UTF-8, failing with a descriptive
// error when the path is missing or does not point at a regular file.
function getConfigFileContent(configPath) {
    if (!fs.existsSync(configPath)) {
        throw new Error(`Configuration file '${configPath}' not found`);
    }
    const stats = fs.lstatSync(configPath);
    if (!stats.isFile()) {
        throw new Error(`'${configPath}' is not a file.`);
    }
    return fs.readFileSync(configPath, { encoding: 'utf8' });
}
// Picks the change-detection strategy:
//  - base === HEAD               -> local uncommitted changes only
//  - pull_request(_target) event -> Github API (with token) or merge commit
//  - anything else               -> comparison via local git history
async function getChangedFiles(token, base, initialFetchDepth) {
    // Simplest case: no fetching or ref evaluation needed.
    if (base === git.HEAD) {
        return await git.getChangesOnHead();
    }
    const eventName = github.context.eventName;
    if (eventName !== 'pull_request' && eventName !== 'pull_request_target') {
        return getChangedFilesFromGit(base, initialFetchDepth);
    }
    const pr = github.context.payload.pull_request;
    if (token) {
        return await getChangedFilesFromApi(token, pr);
    }
    core.info('Github token is not available - changes will be detected from PRs merge commit');
    return await git.getChangesInLastCommit();
}
// Detects changed files from local git history.
// Resolves which commits/refs to diff:
//  - base is a commit SHA            -> diff against that commit
//  - base equals the pushed branch   -> diff against the previous push ('before')
//  - otherwise                       -> diff against merge-base with the base branch
async function getChangedFilesFromGit(base, initialFetchDepth) {
    var _a;
    // TypeScript-compiled optional chaining: payload.repository?.default_branch
    const defaultRef = (_a = github.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
    // 'before' is only meaningful on push events
    const beforeSha = github.context.eventName === 'push' ? github.context.payload.before : null;
    // Comma operator: emit the warning, then fall back to asking git directly.
    const ref = git.getShortName(github.context.ref) ||
        (core.warning(`'ref' field is missing in event payload - using current branch, tag or commit SHA`),
            await git.getCurrentRef());
    const baseRef = git.getShortName(base) || defaultRef;
    if (!baseRef) {
        throw new Error("This action requires 'base' input to be configured or 'repository.default_branch' to be set in the event payload");
    }
    const isBaseRefSha = git.isGitSha(baseRef);
    const isBaseRefSameAsRef = baseRef === ref;
    // If base is commit SHA we will do comparison against the referenced commit
    // Or if base references same branch it was pushed to, we will do comparison against the previously pushed commit
    if (isBaseRefSha || isBaseRefSameAsRef) {
        if (!isBaseRefSha && !beforeSha) {
            core.warning(`'before' field is missing in event payload - changes will be detected from last commit`);
            return await git.getChangesInLastCommit();
        }
        const baseSha = isBaseRefSha ? baseRef : beforeSha;
        // If there is no previously pushed commit,
        // we will do comparison against the default branch or return all as added
        if (baseSha === git.NULL_SHA) {
            if (defaultRef && baseRef !== defaultRef) {
                core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultRef}`);
                return await git.getChangesSinceMergeBase(defaultRef, ref, initialFetchDepth);
            }
            else {
                core.info('Initial push detected - all files will be listed as added');
                return await git.listAllFilesAsAdded();
            }
        }
        core.info(`Changes will be detected against commit (${baseSha})`);
        return await git.getChanges(baseSha);
    }
    // Changes introduced by current branch against the base branch
    core.info(`Changes will be detected against the branch ${baseRef}`);
    return await git.getChangesSinceMergeBase(baseRef, ref, initialFetchDepth);
}
// Uses github REST api to get list of files changed in PR
// Pages through pulls.listFiles (100 entries per page) until the PR's
// reported `changed_files` count has been covered. Renamed files are
// expanded into a delete of the old name plus an add of the new name.
async function getChangedFilesFromApi(token, pullRequest) {
    core.startGroup(`Fetching list of changed files for PR#${pullRequest.number} from Github API`);
    core.info(`Number of changed_files is ${pullRequest.changed_files}`);
    const client = new github.GitHub(token);
    const pageSize = 100;
    const files = [];
    for (let page = 1; (page - 1) * pageSize < pullRequest.changed_files; page++) {
        core.info(`Invoking listFiles(pull_number: ${pullRequest.number}, page: ${page}, per_page: ${pageSize})`);
        const response = await client.pulls.listFiles({
            owner: github.context.repo.owner,
            repo: github.context.repo.repo,
            pull_number: pullRequest.number,
            page,
            per_page: pageSize
        });
        for (const row of response.data) {
            core.info(`[${row.status}] ${row.filename}`);
            // There's no obvious use-case for detection of renames
            // Therefore we treat it as if rename detection in git diff was turned off.
            // Rename is replaced by delete of original filename and add of new filename
            if (row.status === file_1.ChangeStatus.Renamed) {
                files.push({
                    filename: row.filename,
                    status: file_1.ChangeStatus.Added
                });
                files.push({
                    // 'previous_filename' for some unknown reason isn't in the type definition or documentation
                    filename: row.previous_filename,
                    status: file_1.ChangeStatus.Deleted
                });
            }
            else {
                // Github status and git status variants are same except for deleted files
                const status = row.status === 'removed' ? file_1.ChangeStatus.Deleted : row.status;
                files.push({
                    filename: row.filename,
                    status
                });
            }
        }
    }
    core.endGroup();
    return files;
}
// Publishes one boolean output per filter plus `<key>_count`, optionally
// `<key>_files` (in the requested list format), and a top-level 'changes'
// output holding the JSON array of matched filter names.
function exportResults(results, format) {
    core.info('Results:');
    const matchedKeys = [];
    for (const [key, files] of Object.entries(results)) {
        const matched = files.length > 0;
        core.startGroup(`Filter ${key} = ${matched}`);
        if (matched) {
            matchedKeys.push(key);
            core.info('Matching files:');
            for (const file of files) {
                core.info(`${file.filename} [${file.status}]`);
            }
        }
        else {
            core.info('Matching files: none');
        }
        core.setOutput(key, matched);
        core.setOutput(`${key}_count`, files.length);
        if (format !== 'none') {
            core.setOutput(`${key}_files`, serializeExport(files, format));
        }
        core.endGroup();
    }
    // 'changes' is only exported when no filter already claimed that name.
    if (results['changes'] !== undefined) {
        core.info('Cannot set changes output variable - name already used by filter output');
    }
    else {
        const changesJson = JSON.stringify(matchedKeys);
        core.info(`Changes output set to ${changesJson}`);
        core.setOutput('changes', changesJson);
    }
}
// Converts the matched files into the requested `list-files` output format.
// Unknown formats yield an empty string (guarded earlier by isExportFormat).
function serializeExport(files, format) {
    const names = files.map(file => file.filename);
    if (format === 'csv') {
        return names.map(csv_escape_1.csvEscape).join(',');
    }
    if (format === 'json') {
        return JSON.stringify(names);
    }
    if (format === 'escape') {
        return names.map(shell_escape_1.backslashEscape).join(' ');
    }
    if (format === 'shell') {
        return names.map(shell_escape_1.shellEscape).join(' ');
    }
    return '';
}
// True when `value` is one of the supported `list-files` output formats.
function isExportFormat(value) {
    const formats = ['none', 'csv', 'shell', 'json', 'escape'];
    return formats.includes(value);
}
| run(); |
| |
| |
| /***/ }), |
| |
| /***/ 199: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
const path = __webpack_require__(622);
// On Windows a path separator is either a backslash or a forward slash.
const WIN_SLASH = '\\\\/';
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;

/**
 * Posix glob regex
 */

const DOT_LITERAL = '\\.';
const PLUS_LITERAL = '\\+';
const QMARK_LITERAL = '\\?';
const SLASH_LITERAL = '\\/';
const ONE_CHAR = '(?=.)';
const QMARK = '[^/]';
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
const NO_DOT = `(?!${DOT_LITERAL})`;
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
const STAR = `${QMARK}*?`;

// Regex building blocks substituted into the glob-to-regex translation
// on posix platforms.
const POSIX_CHARS = {
  DOT_LITERAL,
  PLUS_LITERAL,
  QMARK_LITERAL,
  SLASH_LITERAL,
  ONE_CHAR,
  QMARK,
  END_ANCHOR,
  DOTS_SLASH,
  NO_DOT,
  NO_DOTS,
  NO_DOT_SLASH,
  NO_DOTS_SLASH,
  QMARK_NO_DOT,
  STAR,
  START_ANCHOR
};

/**
 * Windows glob regex
 */

// Same table with every slash-related pattern widened to accept
// either separator.
const WINDOWS_CHARS = {
  ...POSIX_CHARS,

  SLASH_LITERAL: `[${WIN_SLASH}]`,
  QMARK: WIN_NO_SLASH,
  STAR: `${WIN_NO_SLASH}*?`,
  DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
  NO_DOT: `(?!${DOT_LITERAL})`,
  NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
  NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
  NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
  QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
  START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
  END_ANCHOR: `(?:[${WIN_SLASH}]|$)`
};

/**
 * POSIX Bracket Regex
 */

// Character-class bodies for POSIX bracket expressions like [[:alpha:]].
const POSIX_REGEX_SOURCE = {
  alnum: 'a-zA-Z0-9',
  alpha: 'a-zA-Z',
  ascii: '\\x00-\\x7F',
  blank: ' \\t',
  cntrl: '\\x00-\\x1F\\x7F',
  digit: '0-9',
  graph: '\\x21-\\x7E',
  lower: 'a-z',
  print: '\\x20-\\x7E ',
  punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
  space: ' \\t\\r\\n\\v\\f',
  upper: 'A-Z',
  word: 'A-Za-z0-9_',
  xdigit: 'A-Fa-f0-9'
};
| |
// Shared constants for the picomatch glob parser: regexes, glob
// simplification replacements, character codes, and the per-platform
// regex building blocks defined above.
module.exports = {
  MAX_LENGTH: 1024 * 64,
  POSIX_REGEX_SOURCE,

  // regular expressions
  REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
  REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
  REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
  REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
  REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
  REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,

  // Replace globs with equivalent patterns to reduce parsing time.
  REPLACEMENTS: {
    '***': '*',
    '**/**': '**',
    '**/**/**': '**'
  },

  // Digits
  CHAR_0: 48, /* 0 */
  CHAR_9: 57, /* 9 */

  // Alphabet chars.
  CHAR_UPPERCASE_A: 65, /* A */
  CHAR_LOWERCASE_A: 97, /* a */
  CHAR_UPPERCASE_Z: 90, /* Z */
  CHAR_LOWERCASE_Z: 122, /* z */

  CHAR_LEFT_PARENTHESES: 40, /* ( */
  CHAR_RIGHT_PARENTHESES: 41, /* ) */

  CHAR_ASTERISK: 42, /* * */

  // Non-alphabetic chars.
  CHAR_AMPERSAND: 38, /* & */
  CHAR_AT: 64, /* @ */
  CHAR_BACKWARD_SLASH: 92, /* \ */
  CHAR_CARRIAGE_RETURN: 13, /* \r */
  CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
  CHAR_COLON: 58, /* : */
  CHAR_COMMA: 44, /* , */
  CHAR_DOT: 46, /* . */
  CHAR_DOUBLE_QUOTE: 34, /* " */
  CHAR_EQUAL: 61, /* = */
  CHAR_EXCLAMATION_MARK: 33, /* ! */
  CHAR_FORM_FEED: 12, /* \f */
  CHAR_FORWARD_SLASH: 47, /* / */
  CHAR_GRAVE_ACCENT: 96, /* ` */
  CHAR_HASH: 35, /* # */
  CHAR_HYPHEN_MINUS: 45, /* - */
  CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
  CHAR_LEFT_CURLY_BRACE: 123, /* { */
  CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
  CHAR_LINE_FEED: 10, /* \n */
  CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
  CHAR_PERCENT: 37, /* % */
  CHAR_PLUS: 43, /* + */
  CHAR_QUESTION_MARK: 63, /* ? */
  CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
  CHAR_RIGHT_CURLY_BRACE: 125, /* } */
  CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
  CHAR_SEMICOLON: 59, /* ; */
  CHAR_SINGLE_QUOTE: 39, /* ' */
  CHAR_SPACE: 32, /* */
  CHAR_TAB: 9, /* \t */
  CHAR_UNDERSCORE: 95, /* _ */
  CHAR_VERTICAL_LINE: 124, /* | */
  CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */

  SEP: path.sep,

  /**
   * Create EXTGLOB_CHARS
   */

  // Open/close regex fragments for each extglob operator.
  extglobChars(chars) {
    return {
      '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
      '?': { type: 'qmark', open: '(?:', close: ')?' },
      '+': { type: 'plus', open: '(?:', close: ')+' },
      '*': { type: 'star', open: '(?:', close: ')*' },
      '@': { type: 'at', open: '(?:', close: ')' }
    };
  },

  /**
   * Create GLOB_CHARS
   */

  // Selects the platform-specific regex building blocks.
  globChars(win32) {
    return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
  }
};
| |
| |
| /***/ }), |
| |
| /***/ 206: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.shellEscape = exports.backslashEscape = void 0; |
| // Backslash escape every character except small subset of definitely safe characters |
// Backslash-escapes every character outside a small known-safe set.
function backslashEscape(value) {
    const unsafeChar = /([^a-zA-Z0-9,._+:@%/-])/gm;
    return value.replace(unsafeChar, ch => `\\${ch}`);
}
| exports.backslashEscape = backslashEscape; |
| // Returns filename escaped for usage as shell argument. |
| // Applies "human readable" approach with as few escaping applied as possible |
// Returns filename escaped for usage as shell argument.
// Uses the lightest quoting that still round-trips through a POSIX shell:
// no quoting for safe names, double quotes when single quotes are present
// alongside otherwise-safe characters, single quotes otherwise, and
// recursive splitting when both single quotes and unsafe characters occur.
function shellEscape(value) {
    if (value === '')
        return value;
    const onlySafeChars = /^[a-zA-Z0-9,._+:@%/-]+$/m;
    if (onlySafeChars.test(value)) {
        return value;
    }
    if (!value.includes("'")) {
        // Some unsafe characters but no single quote - wrap in single quotes.
        return `'${value}'`;
    }
    const safePlusQuotesAndSpaces = /^[a-zA-Z0-9,._+:@%/'\s-]+$/m;
    if (safePlusQuotesAndSpaces.test(value)) {
        return `"${value}"`;
    }
    // Split by single quote and apply escaping recursively.
    return value.split("'").map(shellEscape).join("\\'");
}
| exports.shellEscape = shellEscape; |
| |
| |
| /***/ }), |
| |
| /***/ 211: |
| /***/ (function(module) { |
| |
// Re-export Node's built-in https module for the bundle.
module.exports = require("https");
| |
| /***/ }), |
| |
| /***/ 215: |
| /***/ (function(module) { |
| |
| module.exports = {"_args":[["@octokit/rest@16.43.1","C:\\Users\\Michal\\Workspace\\dorny\\pr-changed-files-filter"]],"_from":"@octokit/rest@16.43.1","_id":"@octokit/rest@16.43.1","_inBundle":false,"_integrity":"sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==","_location":"/@octokit/rest","_phantomChildren":{"@types/node":"14.0.5","deprecation":"2.3.1","once":"1.4.0","os-name":"3.1.0"},"_requested":{"type":"version","registry":true,"raw":"@octokit/rest@16.43.1","name":"@octokit/rest","escapedName":"@octokit%2frest","scope":"@octokit","rawSpec":"16.43.1","saveSpec":null,"fetchSpec":"16.43.1"},"_requiredBy":["/@actions/github"],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.1.tgz","_spec":"16.43.1","_where":"C:\\Users\\Michal\\Workspace\\dorny\\pr-changed-files-filter","author":{"name":"Gregor Martynus","url":"https://github.com/gr2m"},"bugs":{"url":"https://github.com/octokit/rest.js/issues"},"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"dependencies":{"@octokit/auth-token":"^2.4.0","@octokit/plugin-paginate-rest":"^1.1.1","@octokit/plugin-request-log":"^1.0.0","@octokit/plugin-rest-endpoint-methods":"2.4.0","@octokit/request":"^5.2.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"description":"GitHub REST API client for 
Node.js","devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/auth":"^1.1.1","@octokit/fixtures-server":"^5.0.6","@octokit/graphql":"^4.2.0","@types/node":"^13.1.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.1.0","cypress":"^3.0.0","glob":"^7.1.2","http-proxy-agent":"^4.0.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","lolex":"^5.1.2","mkdirp":"^1.0.0","mocha":"^7.0.1","mustache":"^4.0.0","nock":"^11.3.3","npm-run-all":"^4.1.2","nyc":"^15.0.0","prettier":"^1.14.2","proxy":"^1.0.0","semantic-release":"^17.0.0","sinon":"^8.0.0","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"files":["index.js","index.d.ts","lib","plugins"],"homepage":"https://github.com/octokit/rest.js#readme","keywords":["octokit","github","rest","api-client"],"license":"MIT","name":"@octokit/rest","nyc":{"ignore":["test"]},"publishConfig":{"access":"public"},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"repository":{"type":"git","url":"git+https://github.com/octokit/rest.js.git"},"scripts":{"build":"npm-run-all build:*","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . 
--plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","build:ts":"npm run -s update-endpoints:typescript","coverage":"nyc report --reporter=html && open coverage/index.html","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","lint":"prettier --check '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","lint:fix":"prettier --write '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","prebuild:browser":"mkdirp dist/","pretest":"npm run -s lint","prevalidate:ts":"npm run -s build:ts","start-fixtures-server":"octokit-fixtures-server","test":"nyc mocha test/mocha-node-setup.js \"test/*/**/*-test.js\"","test:browser":"cypress run --browser chrome","update-endpoints":"npm-run-all update-endpoints:*","update-endpoints:fetch-json":"node scripts/update-endpoints/fetch-json","update-endpoints:typescript":"node scripts/update-endpoints/typescript","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts"},"types":"index.d.ts","version":"16.43.1"}; |
| |
| /***/ }), |
| |
| /***/ 224: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const path = __webpack_require__(622); |
| const win32 = process.platform === 'win32'; |
| const { |
| REGEX_BACKSLASH, |
| REGEX_REMOVE_BACKSLASH, |
| REGEX_SPECIAL_CHARS, |
| REGEX_SPECIAL_CHARS_GLOBAL |
| } = __webpack_require__(199); |
| |
// Small predicate/transform helpers shared by the picomatch parser.
exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');

// Strips escaping backslashes while leaving bracket expressions intact.
exports.removeBackslashes = str => {
  return str.replace(REGEX_REMOVE_BACKSLASH, match => {
    return match === '\\' ? '' : match;
  });
};
| |
| exports.supportsLookbehinds = () => { |
| const segs = process.version.slice(1).split('.').map(Number); |
| if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { |
| return true; |
| } |
| return false; |
| }; |
| |
| exports.isWindows = options => { |
| if (options && typeof options.windows === 'boolean') { |
| return options.windows; |
| } |
| return win32 === true || path.sep === '\\'; |
| }; |
| |
| exports.escapeLast = (input, char, lastIdx) => { |
| const idx = input.lastIndexOf(char, lastIdx); |
| if (idx === -1) return input; |
| if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); |
| return `${input.slice(0, idx)}\\${input.slice(idx)}`; |
| }; |
| |
| exports.removePrefix = (input, state = {}) => { |
| let output = input; |
| if (output.startsWith('./')) { |
| output = output.slice(2); |
| state.prefix = './'; |
| } |
| return output; |
| }; |
| |
| exports.wrapOutput = (input, state = {}, options = {}) => { |
| const prepend = options.contains ? '' : '^'; |
| const append = options.contains ? '' : '$'; |
| |
| let output = `${prepend}(?:${input})${append}`; |
| if (state.negated === true) { |
| output = `(?:^(?!${output}).*$)`; |
| } |
| return output; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 228: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// Scalar resolver for YAML booleans: only the six canonical spellings
// (true/True/TRUE, false/False/FALSE) resolve to this type.
function resolveYamlBoolean(data) {
  if (data === null) return false;

  var len = data.length;

  if (len === 4) {
    return data === 'true' || data === 'True' || data === 'TRUE';
  }
  if (len === 5) {
    return data === 'false' || data === 'False' || data === 'FALSE';
  }
  return false;
}
| |
// Converts a resolved boolean scalar into a JavaScript boolean.
function constructYamlBoolean(data) {
  switch (data) {
    case 'true':
    case 'True':
    case 'TRUE':
      return true;
    default:
      return false;
  }
}
| |
// Dump-time predicate: matches boolean primitives and Boolean objects.
function isBoolean(object) {
  var tag = Object.prototype.toString.call(object);
  return tag === '[object Boolean]';
}
| |
// js-yaml type for `tag:yaml.org,2002:bool`, wiring the resolver and
// constructor above plus one dump representation per casing style.
module.exports = new Type('tag:yaml.org,2002:bool', {
  kind: 'scalar',
  resolve: resolveYamlBoolean,
  construct: constructYamlBoolean,
  predicate: isBoolean,
  represent: {
    lowercase: function (object) { return object ? 'true' : 'false'; },
    uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; },
    camelcase: function (object) { return object ? 'True' : 'False'; }
  },
  defaultStyle: 'lowercase'
});
| |
| |
| /***/ }), |
| |
| /***/ 235: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript-emitted interop helpers (down-leveled `import * as ...` /
// `import x from ...` support).

// Re-exports property `k` of module `m` onto `o` (optionally renamed to `k2`);
// uses a getter where Object.create is available so the binding stays live.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the CommonJS module object as the `default` export of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Builds an ES-module-style namespace object from a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Wraps a CommonJS export as `{ default: mod }` unless it is already an ES module.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.Filter = void 0; |
| const jsyaml = __importStar(__webpack_require__(414)); |
| const picomatch_1 = __importDefault(__webpack_require__(827)); |
| // Minimatch options used in all matchers |
| const MatchOptions = { |
| dot: true |
| }; |
// Maps filter name -> list of rules; each rule couples an optional set of
// change statuses with a picomatch matcher for the filename.
class Filter {
    // Creates instance of Filter and load rules from YAML if it's provided
    constructor(yaml) {
        this.rules = {};
        if (yaml) {
            this.load(yaml);
        }
    }
    // Load rules from YAML string.
    // Throws when the document root is not a mapping of filter names.
    load(yaml) {
        if (!yaml) {
            return;
        }
        const doc = jsyaml.safeLoad(yaml);
        // `typeof null === 'object'`, so reject null explicitly - otherwise
        // Object.entries(null) below would throw a raw TypeError instead of
        // the descriptive format error.
        if (typeof doc !== 'object' || doc === null) {
            this.throwInvalidFormatError('Root element is not an object');
        }
        for (const [key, item] of Object.entries(doc)) {
            this.rules[key] = this.parseFilterItemYaml(item);
        }
    }
    // Returns { filterName: files matching that filter } for every rule set.
    match(files) {
        const result = {};
        for (const [key, patterns] of Object.entries(this.rules)) {
            result[key] = files.filter(file => this.isMatch(file, patterns));
        }
        return result;
    }
    // A file matches when any rule matches its filename and (if the rule
    // restricts statuses) its change status.
    isMatch(file, patterns) {
        return patterns.some(rule => (rule.status === undefined || rule.status.includes(file.status)) && rule.isMatch(file.filename));
    }
    // Normalizes one YAML item (string | array | {status(s): pattern(s)})
    // into a flat list of rules; status keys may combine variants with '|'.
    parseFilterItemYaml(item) {
        if (Array.isArray(item)) {
            return flat(item.map(i => this.parseFilterItemYaml(i)));
        }
        if (typeof item === 'string') {
            return [{ status: undefined, isMatch: picomatch_1.default(item, MatchOptions) }];
        }
        // Guard against null (typeof null === 'object') so a null item is
        // reported as a format error rather than crashing Object.entries.
        if (typeof item === 'object' && item !== null) {
            return Object.entries(item).map(([key, pattern]) => {
                if (typeof key !== 'string' || (typeof pattern !== 'string' && !Array.isArray(pattern))) {
                    this.throwInvalidFormatError(`Expected [key:string]= pattern:string | string[], but [${key}:${typeof key}]= ${pattern}:${typeof pattern} found`);
                }
                return {
                    status: key
                        .split('|')
                        .map(x => x.trim())
                        .filter(x => x.length > 0)
                        .map(x => x.toLowerCase()),
                    isMatch: picomatch_1.default(pattern, MatchOptions)
                };
            });
        }
        this.throwInvalidFormatError(`Unexpected element type '${typeof item}'`);
    }
    // Always throws (never returns).
    throwInvalidFormatError(message) {
        throw new Error(`Invalid filter YAML format: ${message}.`);
    }
}
| exports.Filter = Filter; |
| // Creates a new array with all sub-array elements concatenated |
| // In future could be replaced by Array.prototype.flat (supported on Node.js 11+) |
// Creates a new array with all sub-array elements concatenated
// (one level deep) - stand-in for Array.prototype.flat on older Node.
function flat(arr) {
    const out = [];
    for (const val of arr) {
        if (Array.isArray(val)) {
            out.push(...val);
        }
        else {
            out.push(val);
        }
    }
    return out;
}
| |
| |
| /***/ }), |
| |
| /***/ 258: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.ChangeStatus = void 0; |
// TypeScript-compiled string enum of file change statuses
// (added/copied/deleted/modified/renamed/unmerged).
var ChangeStatus;
(function (ChangeStatus) {
    ChangeStatus["Added"] = "added";
    ChangeStatus["Copied"] = "copied";
    ChangeStatus["Deleted"] = "deleted";
    ChangeStatus["Modified"] = "modified";
    ChangeStatus["Renamed"] = "renamed";
    ChangeStatus["Unmerged"] = "unmerged";
})(ChangeStatus = exports.ChangeStatus || (exports.ChangeStatus = {}));
| |
| |
| /***/ }), |
| |
| /***/ 260: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| // Note: since nyc uses this module to output coverage, any lines |
| // that are in the direct sync flow of nyc's outputCoverage are |
| // ignored, since we can never get coverage for them. |
| var assert = __webpack_require__(357) |
| var signals = __webpack_require__(654) |
| |
| var EE = __webpack_require__(614) |
| /* istanbul ignore if */ |
| if (typeof EE !== 'function') { |
| EE = EE.EventEmitter |
| } |
| |
| var emitter |
| if (process.__signal_exit_emitter__) { |
| emitter = process.__signal_exit_emitter__ |
| } else { |
| emitter = process.__signal_exit_emitter__ = new EE() |
| emitter.count = 0 |
| emitter.emitted = {} |
| } |
| |
| // Because this emitter is a global, we have to check to see if a |
| // previous version of this library failed to enable infinite listeners. |
| // I know what you're about to say. But literally everything about |
| // signal-exit is a compromise with evil. Get used to it. |
| if (!emitter.infinite) { |
| emitter.setMaxListeners(Infinity) |
| emitter.infinite = true |
| } |
| |
// Registers `cb` to run when the process exits ('afterexit' when
// opts.alwaysLast is set, so it runs after the regular handlers).
// Returns a function that unregisters the callback again.
module.exports = function (cb, opts) {
  assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')

  // Lazily install the signal listeners and process patches on first use.
  if (loaded === false) {
    load()
  }

  var ev = 'exit'
  if (opts && opts.alwaysLast) {
    ev = 'afterexit'
  }

  var remove = function () {
    emitter.removeListener(ev, cb)
    // Last subscriber gone: restore the patched process internals.
    if (emitter.listeners('exit').length === 0 &&
        emitter.listeners('afterexit').length === 0) {
      unload()
    }
  }
  emitter.on(ev, cb)

  return remove
}
| |
| module.exports.unload = unload |
| function unload () { |
| if (!loaded) { |
| return |
| } |
| loaded = false |
| |
| signals.forEach(function (sig) { |
| try { |
| process.removeListener(sig, sigListeners[sig]) |
| } catch (er) {} |
| }) |
| process.emit = originalProcessEmit |
| process.reallyExit = originalProcessReallyExit |
| emitter.count -= 1 |
| } |
| |
| function emit (event, code, signal) { |
| if (emitter.emitted[event]) { |
| return |
| } |
| emitter.emitted[event] = true |
| emitter.emit(event, code, signal) |
| } |
| |
// { <signal>: <listener fn>, ... }
var sigListeners = {}
signals.forEach(function (sig) {
  sigListeners[sig] = function listener () {
    // If there are no other listeners, an exit is coming!
    // Simplest way: remove us and then re-send the signal.
    // We know that this will kill the process, so we can
    // safely emit now.
    var listeners = process.listeners(sig)
    if (listeners.length === emitter.count) {
      unload()
      emit('exit', null, sig)
      /* istanbul ignore next */
      emit('afterexit', null, sig)
      /* istanbul ignore next */
      process.kill(process.pid, sig)
    }
  }
})

// Exposes the list of signals currently being handled.
module.exports.signals = function () {
  return signals
}

module.exports.load = load

// Whether the signal listeners and process patches are installed.
var loaded = false
| |
// Installs the signal listeners and patches process.emit/process.reallyExit
// so 'exit'/'afterexit' events fire for every way the process can end.
function load () {
  if (loaded) {
    return
  }
  loaded = true

  // This is the number of onSignalExit's that are in play.
  // It's important so that we can count the correct number of
  // listeners on signals, and don't wait for the other one to
  // handle it instead of us.
  emitter.count += 1

  // Keep only the signals this platform actually lets us listen for.
  signals = signals.filter(function (sig) {
    try {
      process.on(sig, sigListeners[sig])
      return true
    } catch (er) {
      return false
    }
  })

  process.emit = processEmit
  process.reallyExit = processReallyExit
}
| |
var originalProcessReallyExit = process.reallyExit
// Replacement for process.reallyExit: emit our events before actually exiting.
function processReallyExit (code) {
  process.exitCode = code || 0
  emit('exit', process.exitCode, null)
  /* istanbul ignore next */
  emit('afterexit', process.exitCode, null)
  /* istanbul ignore next */
  originalProcessReallyExit.call(process, process.exitCode)
}

var originalProcessEmit = process.emit
// Replacement for process.emit: intercepts 'exit' so our handlers observe
// the final exit code; every other event passes straight through.
function processEmit (ev, arg) {
  if (ev === 'exit') {
    if (arg !== undefined) {
      process.exitCode = arg
    }
    var ret = originalProcessEmit.apply(this, arguments)
    emit('exit', process.exitCode, null)
    /* istanbul ignore next */
    emit('afterexit', process.exitCode, null)
    return ret
  } else {
    return originalProcessEmit.apply(this, arguments)
  }
}
| |
| |
| /***/ }), |
| |
| /***/ 262: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const fs_1 = __webpack_require__(747); |
| const os_1 = __webpack_require__(87); |
/**
 * GitHub Actions workflow context, hydrated from the GITHUB_* environment
 * variables and the webhook event payload file.
 */
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        // Webhook payload; stays an empty object when no event file exists.
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                // Missing payload file is a warning on stdout, not an error.
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
    }
    /**
     * { owner, repo, number } for the issue or pull request of the current
     * event; falls back to a top-level `number` on the payload itself.
     */
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    /**
     * { owner, repo }, preferring GITHUB_REPOSITORY over the payload.
     * Throws when neither source is available.
     */
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports.Context = Context;
| //# sourceMappingURL=context.js.map |
| |
| /***/ }), |
| |
| /***/ 265: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = getPage |
| |
| const deprecate = __webpack_require__(370) |
| const getPageLinks = __webpack_require__(577) |
| const HttpError = __webpack_require__(297) |
| |
// Fetch one related page ("next" / "prev" / "first" / "last") of a
// paginated response by following the corresponding Link-header relation.
// Deprecated in favor of octokit.paginate / async iterators.
function getPage (octokit, link, which, headers) {
  deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  const url = getPageLinks(link)[which]

  // The requested relation is absent from the Link header: reject with a
  // 404-style HttpError rather than issuing a request.
  if (!url) {
    const urlError = new HttpError(`No ${which} page found`, 404)
    return Promise.reject(urlError)
  }

  const requestOptions = {
    url,
    // Re-use the previous response's media type unless the caller set an
    // explicit `accept` header.
    headers: applyAcceptHeader(link, headers)
  }

  const promise = octokit.request(requestOptions)

  return promise
}
| |
// Carry the media type of a previous response over to the next request.
// Returns `headers` unchanged when the response exposes no
// `x-github-media-type` or when the caller already set an explicit
// `accept`; otherwise derives an `accept` header from the media type
// (mutating and returning the supplied headers object, or a new one).
function applyAcceptHeader (res, headers) {
  const mediaType = res.headers && res.headers['x-github-media-type']

  if (!mediaType) {
    return headers
  }
  if (headers && headers.accept) {
    return headers
  }

  const result = headers || {}
  // e.g. "github.v3; param=raw; format=json" -> "application/vnd.github.v3.raw+json"
  result.accept = 'application/vnd.' + mediaType
    .replace('; param=', '.')
    .replace('; format=', '+')

  return result
}
| |
| |
| /***/ }), |
| |
| /***/ 280: |
| /***/ (function(module) { |
| |
| module.exports = register |
| |
// Run `method(options)` wrapped in all hooks registered under `name`.
// An array of names nests the invocation inside each name in order
// (outermost first). Always returns a Promise; throws synchronously only
// when `method` is not callable.
function register (state, name, method, options) {
  if (typeof method !== 'function') {
    throw new Error('method for before hook must be a function')
  }

  if (!options) {
    options = {}
  }

  // Multiple names: chain register() calls so the first name is the
  // outermost wrapper. (reverse() intentionally matches upstream behavior.)
  if (Array.isArray(name)) {
    let chained = method
    for (const hookName of name.reverse()) {
      chained = register.bind(null, state, hookName, chained, options)
    }
    return chained()
  }

  return Promise.resolve().then(() => {
    const hooks = state.registry[name]

    // Nothing registered for this name: invoke the method directly.
    if (!hooks) {
      return method(options)
    }

    // Wrap the method with each registered hook, earliest registration
    // becoming the innermost layer, then invoke the result.
    let wrapped = method
    for (const registered of hooks) {
      wrapped = registered.hook.bind(null, wrapped, options)
    }
    return wrapped()
  })
}
| |
| |
| /***/ }), |
| |
| /***/ 293: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticationRequestError; |
| |
| const { RequestError } = __webpack_require__(497); |
| |
// Error hook that recovers from GitHub's "2FA required" 401 responses by
// asking the configured `on2fa()` callback for a one-time password and
// retrying the request. Any other error is rethrown unchanged.
function authenticationRequestError(state, error, options) {
  if (!error.headers) throw error;

  const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
  // handle "2FA required" error only
  if (error.status !== 401 || !otpRequired) {
    throw error;
  }

  // The failing request already carried an OTP: either our cached OTP
  // expired (drop it and ask again) or the supplied one was wrong.
  if (
    error.status === 401 &&
    otpRequired &&
    error.request &&
    error.request.headers["x-github-otp"]
  ) {
    if (state.otp) {
      delete state.otp; // no longer valid, request again
    } else {
      throw new RequestError(
        "Invalid one-time password for two-factor authentication",
        401,
        {
          headers: error.headers,
          request: options
        }
      );
    }
  }

  if (typeof state.auth.on2fa !== "function") {
    throw new RequestError(
      "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
      401,
      {
        headers: error.headers,
        request: options
      }
    );
  }

  // Obtain a fresh OTP, retry with it, and cache it on success so
  // subsequent requests can reuse it.
  return Promise.resolve()
    .then(() => {
      return state.auth.on2fa();
    })
    .then(oneTimePassword => {
      const newOptions = Object.assign(options, {
        headers: Object.assign(options.headers, {
          "x-github-otp": oneTimePassword
        })
      });
      return state.octokit.request(newOptions).then(response => {
        // If OTP still valid, then persist it for following requests
        state.otp = oneTimePassword;
        return response;
      });
    });
}
| |
| |
| /***/ }), |
| |
| /***/ 294: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = parseOptions; |
| |
| const { Deprecation } = __webpack_require__(692); |
| const { getUserAgent } = __webpack_require__(619); |
| const once = __webpack_require__(969); |
| |
| const pkg = __webpack_require__(215); |
| |
// Each constructor-option deprecation is logged at most once per process
// (the once() wrapper swallows repeat calls).
const deprecateOptionsTimeout = once((log, deprecation) =>
  log.warn(deprecation)
);
const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation));
const deprecateOptionsHeaders = once((log, deprecation) =>
  log.warn(deprecation)
);
| |
// Translate user-facing Octokit constructor options into request defaults
// (headers, baseUrl, mediaType previews, request settings), emitting
// one-time deprecation warnings for the legacy timeout/agent/headers options.
function parseOptions(options, log, hook) {
  // Header names are case-insensitive; normalize all keys to lowercase.
  if (options.headers) {
    options.headers = Object.keys(options.headers).reduce((newObj, key) => {
      newObj[key.toLowerCase()] = options.headers[key];
      return newObj;
    }, {});
  }

  const clientDefaults = {
    headers: options.headers || {},
    request: options.request || {},
    mediaType: {
      previews: [],
      format: ""
    }
  };

  if (options.baseUrl) {
    clientDefaults.baseUrl = options.baseUrl;
  }

  if (options.userAgent) {
    clientDefaults.headers["user-agent"] = options.userAgent;
  }

  if (options.previews) {
    clientDefaults.mediaType.previews = options.previews;
  }

  if (options.timeZone) {
    clientDefaults.headers["time-zone"] = options.timeZone;
  }

  // Legacy options below are still honored but warn once per process.
  if (options.timeout) {
    deprecateOptionsTimeout(
      log,
      new Deprecation(
        "[@octokit/rest] new Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request"
      )
    );
    clientDefaults.request.timeout = options.timeout;
  }

  if (options.agent) {
    deprecateOptionsAgent(
      log,
      new Deprecation(
        "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request"
      )
    );
    clientDefaults.request.agent = options.agent;
  }

  if (options.headers) {
    deprecateOptionsHeaders(
      log,
      new Deprecation(
        "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. See https://github.com/octokit/request.js#request"
      )
    );
  }

  // Always append the default octokit.js user agent after any custom one.
  const userAgentOption = clientDefaults.headers["user-agent"];
  const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`;

  clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent]
    .filter(Boolean)
    .join(" ");

  clientDefaults.request.hook = hook.bind(null, "request");

  return clientDefaults;
}
| |
| |
| /***/ }), |
| |
| /***/ 297: |
| /***/ (function(module) { |
| |
| module.exports = class HttpError extends Error { |
| constructor (message, code, headers) { |
| super(message) |
| |
| // Maintains proper stack trace (only available on V8) |
| /* istanbul ignore next */ |
| if (Error.captureStackTrace) { |
| Error.captureStackTrace(this, this.constructor) |
| } |
| |
| this.name = 'HttpError' |
| this.code = code |
| this.headers = headers |
| } |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 299: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| const VERSION = "1.1.2"; |
| |
| /** |
| * Some “list” response that can be paginated have a different response structure |
| * |
| * They have a `total_count` key in the response (search also has `incomplete_results`, |
| * /installation/repositories also has `repository_selection`), as well as a key with |
| * the list of the items which name varies from endpoint to endpoint: |
| * |
| * - https://developer.github.com/v3/search/#example (key `items`) |
| * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`) |
| * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`) |
| * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`) |
| * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`) |
| * |
| * Octokit normalizes these responses so that paginated results are always returned following |
| * the same structure. One challenge is that if the list response has only one page, no Link |
| * header is provided, so this header alone is not sufficient to check wether a response is |
| * paginated or not. For the exceptions with the namespace, a fallback check for the route |
| * paths has to be added in order to normalize the response. We cannot check for the total_count |
| * property because it also exists in the response of Get the combined status for a specific ref. |
| */ |
| const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/]; |
// Normalize "list" responses whose items are wrapped under an
// endpoint-specific key (e.g. `items`, `check_runs`): move that array to
// `response.data`, re-attach total_count / incomplete_results /
// repository_selection onto it, and leave a deprecated getter for the
// old namespaced key. Only applies to routes matched by REGEX.
function normalizePaginatedListResponse(octokit, url, response) {
  const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, "");
  const responseNeedsNormalization = REGEX.find(regex => regex.test(path));
  if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way
  // to retrieve the same information.

  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  // After removing the meta keys, the single remaining key holds the list.
  const namespaceKey = Object.keys(response.data)[0];
  const data = response.data[namespaceKey];
  response.data = data;

  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }

  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }

  response.data.total_count = totalCount;
  // Reading the old namespaced key still works but logs a deprecation.
  Object.defineProperty(response.data, namespaceKey, {
    get() {
      octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`);
      return Array.from(data);
    }

  });
}
| |
// Create an async-iterable over the pages of a paginated endpoint: each
// next() fetches one page, then advances `url` to the rel="next" link
// from the response's Link header (undefined ends the iteration).
function iterator(octokit, route, parameters) {
  const options = octokit.request.endpoint(route, parameters);
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      next() {
        // No next-page URL left: signal completion.
        if (!url) {
          return Promise.resolve({
            done: true
          });
        }

        return octokit.request({
          method,
          url,
          headers
        }).then(response => {
          normalizePaginatedListResponse(octokit, url, response); // `response.headers.link` format:
          // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
          // sets `url` to undefined if "next" URL is not present or `link` header is not set

          url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
          return {
            value: response
          };
        });
      }

    })
  };
}
| |
// Collect all pages for a route into a single array. `mapFn(response,
// done)` may transform each page and call done() to stop early; passing
// a function as `parameters` treats it as the map function.
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }

  const asyncIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], asyncIterator, mapFn);
}
| |
// Recursively drain `iterator`, concatenating each page onto `results`.
// When `mapFn` is given its return value is concatenated instead of the
// raw `response.data`, and calling its `done` argument stops after the
// current page. Resolves with the accumulated array.
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    if (result.done) {
      return results;
    }

    let earlyExit = false;
    const done = () => {
      earlyExit = true;
    };

    const page = mapFn ? mapFn(result.value, done) : result.value.data;
    const collected = results.concat(page);

    return earlyExit ? collected : gather(octokit, collected, iterator, mapFn);
  });
}
| |
| /** |
| * @param octokit Octokit instance |
| * @param options Options passed to Octokit constructor |
| */ |
| |
// Octokit plugin: exposes `octokit.paginate(...)` plus
// `octokit.paginate.iterator(...)`, both pre-bound to this instance.
function paginateRest(octokit) {
  const boundPaginate = paginate.bind(null, octokit);
  boundPaginate.iterator = iterator.bind(null, octokit);
  return {
    paginate: boundPaginate
  };
}
paginateRest.VERSION = VERSION;
| |
| exports.paginateRest = paginateRest; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 323: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
// A stream is any non-null object exposing a pipe() method.
var isStream = module.exports = function (stream) {
  if (stream === null || typeof stream !== 'object') {
    return false;
  }
  return typeof stream.pipe === 'function';
};

// Writable: exposes the private _write machinery and is not closed.
isStream.writable = function (stream) {
  if (!isStream(stream) || stream.writable === false) {
    return false;
  }
  return typeof stream._write === 'function' && typeof stream._writableState === 'object';
};

// Readable: exposes the private _read machinery and is not closed.
isStream.readable = function (stream) {
  if (!isStream(stream) || stream.readable === false) {
    return false;
  }
  return typeof stream._read === 'function' && typeof stream._readableState === 'object';
};

// Duplex: both readable and writable.
isStream.duplex = function (stream) {
  return isStream.writable(stream) && isStream.readable(stream);
};

// Transform: a duplex stream with the private _transform machinery.
isStream.transform = function (stream) {
  if (!isStream.duplex(stream)) {
    return false;
  }
  return typeof stream._transform === 'function' && typeof stream._transformState === 'object';
};
| |
| |
| /***/ }), |
| |
| /***/ 336: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = hasLastPage |
| |
| const deprecate = __webpack_require__(370) |
| const getPageLinks = __webpack_require__(577) |
| |
// Deprecated helper: returns the URL of the rel="last" page from a Link
// header (or undefined when absent).
function hasLastPage (link) {
  deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  const { last } = getPageLinks(link)
  return last
}
| |
| |
| /***/ }), |
| |
| /***/ 348: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| module.exports = validate; |
| |
| const { RequestError } = __webpack_require__(497); |
| const get = __webpack_require__(854); |
| const set = __webpack_require__(883); |
| |
// Validate and normalize request parameters against the endpoint's
// `options.request.validate` definitions (required, type, enum, regex
// validation, allowNull, mapTo). Mutates `options` in place via set()
// and returns it; throws RequestError (400) on the first violation.
function validate(octokit, options) {
  if (!options.request.validate) {
    return;
  }
  const { validate: params } = options.request;

  Object.keys(params).forEach(parameterName => {
    const parameter = get(params, parameterName);

    const expectedType = parameter.type;
    let parentParameterName;
    let parentValue;
    let parentParamIsPresent = true;
    let parentParameterIsArray = false;

    // Dotted names ("foo.bar", "foo[].bar") describe nested parameters;
    // resolve the parent so children of absent parents can be skipped.
    if (/\./.test(parameterName)) {
      parentParameterName = parameterName.replace(/\.[^.]+$/, "");
      parentParameterIsArray = parentParameterName.slice(-2) === "[]";
      if (parentParameterIsArray) {
        parentParameterName = parentParameterName.slice(0, -2);
      }
      parentValue = get(options, parentParameterName);
      parentParamIsPresent =
        parentParameterName === "headers" ||
        (typeof parentValue === "object" && parentValue !== null);
    }

    // For "parent[].child" validate the child value of every array item;
    // otherwise validate the single value at the dotted path.
    const values = parentParameterIsArray
      ? (get(options, parentParameterName) || []).map(
          value => value[parameterName.split(/\./).pop()]
        )
      : [get(options, parameterName)];

    values.forEach((value, i) => {
      const valueIsPresent = typeof value !== "undefined";
      const valueIsNull = value === null;
      // Error messages use the concrete index, e.g. "foo[2].bar".
      const currentParameterName = parentParameterIsArray
        ? parameterName.replace(/\[\]/, `[${i}]`)
        : parameterName;

      if (!parameter.required && !valueIsPresent) {
        return;
      }

      // if the parent parameter is of type object but allows null
      // then the child parameters can be ignored
      if (!parentParamIsPresent) {
        return;
      }

      if (parameter.allowNull && valueIsNull) {
        return;
      }

      if (!parameter.allowNull && valueIsNull) {
        throw new RequestError(
          `'${currentParameterName}' cannot be null`,
          400,
          {
            request: options
          }
        );
      }

      if (parameter.required && !valueIsPresent) {
        throw new RequestError(
          `Empty value for parameter '${currentParameterName}': ${JSON.stringify(
            value
          )}`,
          400,
          {
            request: options
          }
        );
      }

      // parse to integer before checking for enum
      // so that string "1" will match enum with number 1
      if (expectedType === "integer") {
        const unparsedValue = value;
        value = parseInt(value, 10);
        if (isNaN(value)) {
          throw new RequestError(
            `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
              unparsedValue
            )} is NaN`,
            400,
            {
              request: options
            }
          );
        }
      }

      // Enum membership is compared on the stringified value.
      if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) {
        throw new RequestError(
          `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
            value
          )}`,
          400,
          {
            request: options
          }
        );
      }

      // Free-form regex validation supplied by the endpoint definition.
      if (parameter.validation) {
        const regex = new RegExp(parameter.validation);
        if (!regex.test(value)) {
          throw new RequestError(
            `Invalid value for parameter '${currentParameterName}': ${JSON.stringify(
              value
            )}`,
            400,
            {
              request: options
            }
          );
        }
      }

      // Object parameters passed as JSON strings are parsed in place.
      if (expectedType === "object" && typeof value === "string") {
        try {
          value = JSON.parse(value);
        } catch (exception) {
          throw new RequestError(
            `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify(
              value
            )}`,
            400,
            {
              request: options
            }
          );
        }
      }

      // Write the (possibly coerced) value back, honoring mapTo renames.
      set(options, parameter.mapTo || currentParameterName, value);
    });
  });

  return options;
}
| |
| |
| /***/ }), |
| |
| /***/ 349: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticationRequestError; |
| |
| const { RequestError } = __webpack_require__(497); |
| |
// Error hook for the auth plugin: recovers from GitHub's "2FA required"
// 401 responses by asking `on2fa()` for a one-time password and retrying
// the request once. All other errors are rethrown unchanged. Unlike the
// sibling handler, this variant does not cache the OTP between requests.
function authenticationRequestError(state, error, options) {
  /* istanbul ignore next */
  if (!error.headers) throw error;

  const otpRequired = /required/.test(error.headers["x-github-otp"] || "");
  // handle "2FA required" error only
  if (error.status !== 401 || !otpRequired) {
    throw error;
  }

  // The failed request already carried an OTP, so the supplied password
  // was wrong: surface that to the caller instead of retrying forever.
  if (
    error.status === 401 &&
    otpRequired &&
    error.request &&
    error.request.headers["x-github-otp"]
  ) {
    throw new RequestError(
      "Invalid one-time password for two-factor authentication",
      401,
      {
        headers: error.headers,
        request: options
      }
    );
  }

  if (typeof state.auth.on2fa !== "function") {
    throw new RequestError(
      "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication",
      401,
      {
        headers: error.headers,
        request: options
      }
    );
  }

  // Obtain the OTP and retry with it merged into the request headers
  // (existing headers take precedence over the injected OTP key).
  return Promise.resolve()
    .then(() => {
      return state.auth.on2fa();
    })
    .then(oneTimePassword => {
      const newOptions = Object.assign(options, {
        headers: Object.assign(
          { "x-github-otp": oneTimePassword },
          options.headers
        )
      });
      return state.octokit.request(newOptions);
    });
}
| |
| |
| /***/ }), |
| |
| /***/ 352: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var esprima; |
| |
| // Browserified version does not have esprima |
| // |
| // 1. For node.js just require module as deps |
| // 2. For browser try to require mudule via external AMD system. |
| // If not found - try to fallback to window.esprima. If not |
| // found too - then fail to parse. |
| // |
| try { |
| // workaround to exclude package from browserify list. |
| var _require = require; |
| esprima = _require('esprima'); |
| } catch (_) { |
| /* eslint-disable no-redeclare */ |
| /* global window */ |
| if (typeof window !== 'undefined') esprima = window.esprima; |
| } |
| |
| var Type = __webpack_require__(945); |
| |
// YAML !!js/function resolver: returns true when `data` parses (via
// esprima) as exactly one function or arrow-function expression, i.e.
// when constructJavascriptFunction below would be able to revive it.
function resolveJavascriptFunction(data) {
  if (data === null) return false;

  try {
    // Wrap in parens so a bare function literal parses as an expression.
    var source = '(' + data + ')',
        ast = esprima.parse(source, { range: true });

    if (ast.type !== 'Program' ||
        ast.body.length !== 1 ||
        ast.body[0].type !== 'ExpressionStatement' ||
        (ast.body[0].expression.type !== 'ArrowFunctionExpression' &&
          ast.body[0].expression.type !== 'FunctionExpression')) {
      return false;
    }

    return true;
  } catch (err) {
    // Parse failure (or esprima being unavailable) means "not a function".
    return false;
  }
}
| |
// YAML !!js/function constructor: re-parse the function source with
// esprima and rebuild a callable via the Function constructor. NOTE:
// executes attacker-controlled code if the YAML is untrusted.
function constructJavascriptFunction(data) {
  /*jslint evil:true*/

  var source = '(' + data + ')',
      ast = esprima.parse(source, { range: true }),
      params = [],
      body;

  if (ast.type !== 'Program' ||
      ast.body.length !== 1 ||
      ast.body[0].type !== 'ExpressionStatement' ||
      (ast.body[0].expression.type !== 'ArrowFunctionExpression' &&
        ast.body[0].expression.type !== 'FunctionExpression')) {
    throw new Error('Failed to resolve function');
  }

  // Collect the parameter names for the Function constructor.
  ast.body[0].expression.params.forEach(function (param) {
    params.push(param.name);
  });

  body = ast.body[0].expression.body.range;

  // Esprima's ranges include the first '{' and the last '}' characters on
  // function expressions. So cut them out.
  if (ast.body[0].expression.body.type === 'BlockStatement') {
    /*eslint-disable no-new-func*/
    return new Function(params, source.slice(body[0] + 1, body[1] - 1));
  }
  // ES6 arrow functions can omit the BlockStatement. In that case, just return
  // the body.
  /*eslint-disable no-new-func*/
  return new Function(params, 'return ' + source.slice(body[0], body[1]));
}
| |
// YAML representer: serialize a function to its source text for dumping.
function representJavascriptFunction (fn /*, style*/) {
  return fn.toString();
}
| |
// Predicate used by the YAML type: true only for plain function objects
// (per Object.prototype.toString's "[object Function]" tag).
function isFunction (object) {
  const tag = Object.prototype.toString.call(object);
  return tag === '[object Function]';
}
| |
// YAML type for the non-standard !!js/function tag: resolves/constructs
// function scalars via the esprima-based helpers above.
module.exports = new Type('tag:yaml.org,2002:js/function', {
  kind: 'scalar',
  resolve: resolveJavascriptFunction,
  construct: constructJavascriptFunction,
  predicate: isFunction,
  represent: representJavascriptFunction
});
| |
| |
| /***/ }), |
| |
| /***/ 357: |
| /***/ (function(module) { |
| |
// Webpack shim: re-export Node's built-in "assert" module.
module.exports = require("assert");
| |
| /***/ }), |
| |
| /***/ 366: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const path = __webpack_require__(622); |
| const scan = __webpack_require__(537); |
| const parse = __webpack_require__(806); |
| const utils = __webpack_require__(224); |
| const constants = __webpack_require__(199); |
| const isObject = val => val && typeof val === 'object' && !Array.isArray(val); |
| |
| /** |
| * Creates a matcher function from one or more glob patterns. The |
| * returned function takes a string to match as its first argument, |
| * and returns true if the string is a match. The returned matcher |
| * function also takes a boolean as the second argument that, when true, |
| * returns an object with additional information. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * // picomatch(glob[, options]); |
| * |
| * const isMatch = picomatch('*.!(*a)'); |
| * console.log(isMatch('a.a')); //=> false |
| * console.log(isMatch('a.b')); //=> true |
| * ``` |
| * @name picomatch |
| * @param {String|Array} `globs` One or more glob patterns. |
| * @param {Object=} `options` |
| * @return {Function=} Returns a matcher function. |
| * @api public |
| */ |
| |
const picomatch = (glob, options, returnState = false) => {
  // An array of patterns matches when ANY of the compiled matchers does.
  if (Array.isArray(glob)) {
    const fns = glob.map(input => picomatch(input, options, returnState));
    const arrayMatcher = str => {
      for (const isMatch of fns) {
        const state = isMatch(str);
        if (state) return state;
      }
      return false;
    };
    return arrayMatcher;
  }

  // `glob` may also be a pre-parsed state object (output of .parse()).
  const isState = isObject(glob) && glob.tokens && glob.input;

  if (glob === '' || (typeof glob !== 'string' && !isState)) {
    throw new TypeError('Expected pattern to be a non-empty string');
  }

  const opts = options || {};
  const posix = utils.isWindows(options);
  // Compile to a RegExp, asking makeRe to attach the parser state.
  const regex = isState
    ? picomatch.compileRe(glob, options)
    : picomatch.makeRe(glob, options, false, true);

  const state = regex.state;
  delete regex.state;

  // Compile `opts.ignore` into a secondary matcher whose own ignore and
  // callback options are disabled.
  let isIgnored = () => false;
  if (opts.ignore) {
    const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
    isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
  }

  const matcher = (input, returnObject = false) => {
    const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
    const result = { glob, state, regex, posix, input, output, match, isMatch };

    // onResult fires for every tested input, match or not.
    if (typeof opts.onResult === 'function') {
      opts.onResult(result);
    }

    if (isMatch === false) {
      result.isMatch = false;
      return returnObject ? result : false;
    }

    // A positive match is still rejected when an ignore pattern matches.
    if (isIgnored(input)) {
      if (typeof opts.onIgnore === 'function') {
        opts.onIgnore(result);
      }
      result.isMatch = false;
      return returnObject ? result : false;
    }

    if (typeof opts.onMatch === 'function') {
      opts.onMatch(result);
    }
    return returnObject ? result : true;
  };

  // Expose the parser state on the matcher when requested.
  if (returnState) {
    matcher.state = state;
  }

  return matcher;
};
| |
| /** |
| * Test `input` with the given `regex`. This is used by the main |
| * `picomatch()` function to test the input string. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * // picomatch.test(input, regex[, options]); |
| * |
| * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); |
| * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } |
| * ``` |
| * @param {String} `input` String to test. |
| * @param {RegExp} `regex` |
| * @return {Object} Returns an object with matching info. |
| * @api public |
| */ |
| |
picomatch.test = (input, regex, options, { glob, posix } = {}) => {
  if (typeof input !== 'string') {
    throw new TypeError('Expected input to be a string');
  }

  // Empty input never matches (empty patterns are rejected earlier).
  if (input === '') {
    return { isMatch: false, output: '' };
  }

  const opts = options || {};
  // On Windows, normalize backslashes to forward slashes before matching.
  const format = opts.format || (posix ? utils.toPosixSlashes : null);
  // Cheap path first: the input is literally identical to the pattern.
  let match = input === glob;
  let output = (match && format) ? format(input) : input;

  if (match === false) {
    output = format ? format(input) : input;
    match = output === glob;
  }

  // Fall back to the regex (or basename matching) when there is no exact
  // literal match, or when capture groups were requested.
  if (match === false || opts.capture === true) {
    if (opts.matchBase === true || opts.basename === true) {
      match = picomatch.matchBase(input, regex, options, posix);
    } else {
      match = regex.exec(output);
    }
  }

  return { isMatch: Boolean(match), match, output };
};
| |
| /** |
| * Match the basename of a filepath. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * // picomatch.matchBase(input, glob[, options]); |
| * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true |
| * ``` |
| * @param {String} `input` String to test. |
| * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). |
| * @return {Boolean} |
| * @api public |
| */ |
| |
// Match only the basename of `input` against the glob (or a pre-built regex).
picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => {
  const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
  return regex.test(path.basename(input));
};
| |
| /** |
| * Returns true if **any** of the given glob `patterns` match the specified `string`. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * // picomatch.isMatch(string, patterns[, options]); |
| * |
| * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true |
| * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false |
| * ``` |
| * @param {String|Array} str The string to test. |
| * @param {String|Array} patterns One or more glob patterns to use for matching. |
| * @param {Object} [options] See available [options](#options). |
| * @return {Boolean} Returns true if any patterns match `str` |
| * @api public |
| */ |
| |
// Convenience wrapper: compile `patterns` and immediately test `str`.
picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
| |
| /** |
| * Parse a glob pattern to create the source string for a regular |
| * expression. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * const result = picomatch.parse(pattern[, options]); |
| * ``` |
| * @param {String} `pattern` |
| * @param {Object} `options` |
| * @return {Object} Returns an object with useful properties and output to be used as a regex source string. |
| * @api public |
| */ |
| |
// Parse one pattern (or each element of an array) into a state object;
// fastpaths are disabled so the full parser output is always returned.
picomatch.parse = (pattern, options) => {
  if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));
  return parse(pattern, { ...options, fastpaths: false });
};
| |
| /** |
| * Scan a glob pattern to separate the pattern into segments. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * // picomatch.scan(input[, options]); |
| * |
| * const result = picomatch.scan('!./foo/*.js'); |
| * console.log(result); |
| * { prefix: '!./', |
| * input: '!./foo/*.js', |
| * start: 3, |
| * base: 'foo', |
| * glob: '*.js', |
| * isBrace: false, |
| * isBracket: false, |
| * isGlob: true, |
| * isExtglob: false, |
| * isGlobstar: false, |
| * negated: true } |
| * ``` |
| * @param {String} `input` Glob pattern to scan. |
| * @param {Object} `options` |
| * @return {Object} Returns an object with |
| * @api public |
| */ |
| |
// Thin passthrough to the scanner module (splits base path from glob part).
picomatch.scan = (input, options) => scan(input, options);
| |
| /** |
| * Create a regular expression from a parsed glob pattern. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * const state = picomatch.parse('*.js'); |
| * // picomatch.compileRe(state[, options]); |
| * |
| * console.log(picomatch.compileRe(state)); |
| * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ |
| * ``` |
| * @param {String} `state` The object returned from the `.parse` method. |
| * @param {Object} `options` |
| * @return {RegExp} Returns a regex created from the given pattern. |
| * @api public |
| */ |
| |
picomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => {
  // Short-circuit: hand back the raw regex source string instead.
  if (returnOutput === true) {
    return parsed.output;
  }

  const opts = options || {};
  // `contains` mode drops the ^...$ anchors so the pattern may match anywhere.
  const prepend = opts.contains ? '' : '^';
  const append = opts.contains ? '' : '$';

  let source = `${prepend}(?:${parsed.output})${append}`;
  if (parsed && parsed.negated === true) {
    // Negated pattern: match any string that does NOT match the source.
    source = `^(?!${source}).*$`;
  }

  const regex = picomatch.toRegex(source, options);
  // Optionally attach the parser state for introspection by the matcher.
  if (returnState === true) {
    regex.state = parsed;
  }

  return regex;
};
| |
/**
 * Create a regular expression from a glob pattern string.
 * Tries the fastpaths parser for simple leading-dot/star patterns unless
 * `options.fastpaths` is explicitly disabled, then falls back to the
 * full parser.
 */
picomatch.makeRe = (input, options, returnOutput = false, returnState = false) => {
  if (!input || typeof input !== 'string') {
    throw new TypeError('Expected a non-empty string');
  }

  const opts = options || {};
  let parsed = { negated: false, fastpaths: true };
  let prefix = '';
  let output;

  // A leading "./" is stripped and remembered as the prefix.
  if (input.startsWith('./')) {
    input = input.slice(2);
    prefix = parsed.prefix = './';
  }

  const tryFastpath = opts.fastpaths !== false && (input[0] === '.' || input[0] === '*');
  if (tryFastpath) {
    output = parse.fastpaths(input, options);
  }

  if (output === undefined) {
    // Fastpath declined (or disabled): run the full parser.
    parsed = parse(input, options);
    parsed.prefix = prefix + (parsed.prefix || '');
  } else {
    parsed.output = output;
  }

  return picomatch.compileRe(parsed, options, returnOutput, returnState);
};
| |
| /** |
| * Create a regular expression from the given regex source string. |
| * |
| * ```js |
| * const picomatch = require('picomatch'); |
| * // picomatch.toRegex(source[, options]); |
| * |
| * const { output } = picomatch.parse('*.js'); |
| * console.log(picomatch.toRegex(output)); |
| * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ |
| * ``` |
| * @param {String} `source` Regular expression source string. |
| * @param {Object} `options` |
| * @return {RegExp} |
| * @api public |
| */ |
| |
/**
 * Compile a regex source string into a RegExp.
 * An invalid source yields a never-matching regex (/$^/) unless
 * `options.debug` is set, in which case the SyntaxError is rethrown.
 */
picomatch.toRegex = (source, options) => {
  const opts = options || {};
  try {
    const flags = opts.flags || (opts.nocase ? 'i' : '');
    return new RegExp(source, flags);
  } catch (err) {
    if (opts.debug === true) throw err;
    return /$^/;
  }
};
| |
| /** |
| * Picomatch constants. |
| * @return {Object} |
| */ |
| |
| picomatch.constants = constants; |
| |
| /** |
| * Expose "picomatch" |
| */ |
| |
| module.exports = picomatch; |
| |
| |
| /***/ }), |
| |
| /***/ 368: |
| /***/ (function(module) { |
| |
| module.exports = function atob(str) { |
| return Buffer.from(str, 'base64').toString('binary') |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 369: |
| /***/ (function(module, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Unwrap a transpiled ES-module namespace: when given an object carrying a
// `default` property, return that default; otherwise return the value as-is.
function _interopDefault (ex) {
	if (ex && typeof ex === 'object' && 'default' in ex) {
		return ex['default'];
	}
	return ex;
}
| |
| var Stream = _interopDefault(__webpack_require__(413)); |
| var http = _interopDefault(__webpack_require__(605)); |
| var Url = _interopDefault(__webpack_require__(835)); |
| var https = _interopDefault(__webpack_require__(211)); |
| var zlib = _interopDefault(__webpack_require__(761)); |
| |
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js

// fix for "Readable" isn't a named export issue
const Readable = Stream.Readable;

// Symbol keys that hide Blob internals (raw bytes and MIME type) from
// ordinary property enumeration.
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');
| |
/**
 * Minimal W3C Blob implementation backed by a single Node Buffer.
 * The raw bytes live under the BUFFER symbol and the MIME type under TYPE.
 */
class Blob {
	constructor() {
		this[TYPE] = '';

		const blobParts = arguments[0];
		const options = arguments[1];

		const buffers = [];
		let size = 0;

		if (blobParts) {
			const a = blobParts;
			const length = Number(a.length);
			// Normalize every part (Buffer, typed-array view, ArrayBuffer,
			// Blob, or anything else stringified) to a Buffer.
			for (let i = 0; i < length; i++) {
				const element = a[i];
				let buffer;
				if (element instanceof Buffer) {
					buffer = element;
				} else if (ArrayBuffer.isView(element)) {
					buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
				} else if (element instanceof ArrayBuffer) {
					buffer = Buffer.from(element);
				} else if (element instanceof Blob) {
					buffer = element[BUFFER];
				} else {
					buffer = Buffer.from(typeof element === 'string' ? element : String(element));
				}
				size += buffer.length;
				buffers.push(buffer);
			}
		}

		this[BUFFER] = Buffer.concat(buffers);

		// The type option is only kept when it contains printable ASCII.
		let type = options && options.type !== undefined && String(options.type).toLowerCase();
		if (type && !/[^\u0020-\u007E]/.test(type)) {
			this[TYPE] = type;
		}
	}
	// Total byte length of the blob's contents.
	get size() {
		return this[BUFFER].length;
	}
	// Lower-cased MIME type, or '' when none was accepted.
	get type() {
		return this[TYPE];
	}
	// Resolve with the contents decoded as a (default utf-8) string.
	text() {
		return Promise.resolve(this[BUFFER].toString());
	}
	// Resolve with a copy of the contents as an ArrayBuffer.
	arrayBuffer() {
		const buf = this[BUFFER];
		const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
		return Promise.resolve(ab);
	}
	// Return a Readable stream that emits the contents as a single chunk.
	stream() {
		const readable = new Readable();
		readable._read = function () {};
		readable.push(this[BUFFER]);
		readable.push(null);
		return readable;
	}
	toString() {
		return '[object Blob]';
	}
	// Spec-style slice: negative indices count back from the end; the result
	// only carries a type if one is passed as the third argument.
	slice() {
		const size = this.size;

		const start = arguments[0];
		const end = arguments[1];
		let relativeStart, relativeEnd;
		if (start === undefined) {
			relativeStart = 0;
		} else if (start < 0) {
			relativeStart = Math.max(size + start, 0);
		} else {
			relativeStart = Math.min(start, size);
		}
		if (end === undefined) {
			relativeEnd = size;
		} else if (end < 0) {
			relativeEnd = Math.max(size + end, 0);
		} else {
			relativeEnd = Math.min(end, size);
		}
		const span = Math.max(relativeEnd - relativeStart, 0);

		const buffer = this[BUFFER];
		const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
		const blob = new Blob([], { type: arguments[2] });
		blob[BUFFER] = slicedBuffer;
		return blob;
	}
}
| |
// In browsers these Blob members are enumerable; mirror that here.
Object.defineProperties(Blob.prototype, {
	size: { enumerable: true },
	type: { enumerable: true },
	slice: { enumerable: true }
});

// Make Object.prototype.toString.call(blob) report '[object Blob]'.
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
	value: 'Blob',
	writable: false,
	enumerable: false,
	configurable: true
});
| |
| /** |
| * fetch-error.js |
| * |
| * FetchError interface for operational errors |
| */ |
| |
| /** |
| * Create FetchError instance |
| * |
| * @param String message Error message for human |
| * @param String type Error type for machine |
| * @param String systemError For Node.js system error |
| * @return FetchError |
| */ |
| function FetchError(message, type, systemError) { |
| Error.call(this, message); |
| |
| this.message = message; |
| this.type = type; |
| |
| // when err.type is `system`, err.code contains system error code |
| if (systemError) { |
| this.code = this.errno = systemError.code; |
| } |
| |
| // hide custom error implementation details from end-users |
| Error.captureStackTrace(this, this.constructor); |
| } |
| |
| FetchError.prototype = Object.create(Error.prototype); |
| FetchError.prototype.constructor = FetchError; |
| FetchError.prototype.name = 'FetchError'; |
| |
// Optional dependency: the `encoding` package supplies charset conversion
// for textConverted(); its absence is tolerated (convert stays undefined).
let convert;
try {
	convert = __webpack_require__(18).convert;
} catch (e) {}

// Symbol key hiding per-instance Body state (body, disturbed, error).
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
| |
| /** |
| * Body mixin |
| * |
| * Ref: https://fetch.spec.whatwg.org/#body |
| * |
| * @param Stream body Readable stream |
| * @param Object opts Response options |
| * @return Void |
| */ |
| function Body(body) { |
| var _this = this; |
| |
| var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, |
| _ref$size = _ref.size; |
| |
| let size = _ref$size === undefined ? 0 : _ref$size; |
| var _ref$timeout = _ref.timeout; |
| let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; |
| |
| if (body == null) { |
| // body is undefined or null |
| body = null; |
| } else if (isURLSearchParams(body)) { |
| // body is a URLSearchParams |
| body = Buffer.from(body.toString()); |
| } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { |
| // body is ArrayBuffer |
| body = Buffer.from(body); |
| } else if (ArrayBuffer.isView(body)) { |
| // body is ArrayBufferView |
| body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); |
| } else if (body instanceof Stream) ; else { |
| // none of the above |
| // coerce to string then buffer |
| body = Buffer.from(String(body)); |
| } |
| this[INTERNALS] = { |
| body, |
| disturbed: false, |
| error: null |
| }; |
| this.size = size; |
| this.timeout = timeout; |
| |
| if (body instanceof Stream) { |
| body.on('error', function (err) { |
| const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); |
| _this[INTERNALS].error = error; |
| }); |
| } |
| } |
| |
Body.prototype = {
	// Raw (possibly normalized) body value.
	get body() {
		return this[INTERNALS].body;
	},

	// True once the body has been consumed.
	get bodyUsed() {
		return this[INTERNALS].disturbed;
	},

	/**
	 * Decode response as ArrayBuffer
	 *
	 * @return Promise
	 */
	arrayBuffer() {
		return consumeBody.call(this).then((buf) => buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength));
	},

	/**
	 * Return raw response as Blob
	 *
	 * @return Promise
	 */
	blob() {
		const ct = this.headers && this.headers.get('content-type') || '';
		return consumeBody.call(this).then((buf) => Object.assign(
		// Prevent copying
		new Blob([], {
			type: ct.toLowerCase()
		}), {
			[BUFFER]: buf
		}));
	},

	/**
	 * Decode response as json
	 *
	 * @return Promise
	 */
	json() {
		return consumeBody.call(this).then((buffer) => {
			try {
				return JSON.parse(buffer.toString());
			} catch (err) {
				return Body.Promise.reject(new FetchError(`invalid json response body at ${this.url} reason: ${err.message}`, 'invalid-json'));
			}
		});
	},

	/**
	 * Decode response as text
	 *
	 * @return Promise
	 */
	text() {
		return consumeBody.call(this).then((buffer) => buffer.toString());
	},

	/**
	 * Decode response as buffer (non-spec api)
	 *
	 * @return Promise
	 */
	buffer() {
		return consumeBody.call(this);
	},

	/**
	 * Decode response as text, while automatically detecting the encoding and
	 * trying to decode to UTF-8 (non-spec api)
	 *
	 * @return Promise
	 */
	textConverted() {
		return consumeBody.call(this).then((buffer) => convertBody(buffer, this.headers));
	}
};
| |
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
	body: { enumerable: true },
	bodyUsed: { enumerable: true },
	arrayBuffer: { enumerable: true },
	blob: { enumerable: true },
	json: { enumerable: true },
	text: { enumerable: true }
});

// Copy every Body.prototype member (including getters) onto `proto`,
// skipping names the target already defines.
Body.mixIn = function (proto) {
	const names = Object.getOwnPropertyNames(Body.prototype);
	for (const name of names) {
		// istanbul ignore else: future proof
		if (name in proto) continue;
		const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
		Object.defineProperty(proto, name, desc);
	}
};
| |
| /** |
| * Consume and convert an entire Body to a Buffer. |
| * |
| * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body |
| * |
| * @return Promise |
| */ |
| function consumeBody() { |
| var _this4 = this; |
| |
| if (this[INTERNALS].disturbed) { |
| return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); |
| } |
| |
| this[INTERNALS].disturbed = true; |
| |
| if (this[INTERNALS].error) { |
| return Body.Promise.reject(this[INTERNALS].error); |
| } |
| |
| let body = this.body; |
| |
| // body is null |
| if (body === null) { |
| return Body.Promise.resolve(Buffer.alloc(0)); |
| } |
| |
| // body is blob |
| if (isBlob(body)) { |
| body = body.stream(); |
| } |
| |
| // body is buffer |
| if (Buffer.isBuffer(body)) { |
| return Body.Promise.resolve(body); |
| } |
| |
| // istanbul ignore if: should never happen |
| if (!(body instanceof Stream)) { |
| return Body.Promise.resolve(Buffer.alloc(0)); |
| } |
| |
| // body is stream |
| // get ready to actually consume the body |
| let accum = []; |
| let accumBytes = 0; |
| let abort = false; |
| |
| return new Body.Promise(function (resolve, reject) { |
| let resTimeout; |
| |
| // allow timeout on slow response body |
| if (_this4.timeout) { |
| resTimeout = setTimeout(function () { |
| abort = true; |
| reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); |
| }, _this4.timeout); |
| } |
| |
| // handle stream errors |
| body.on('error', function (err) { |
| if (err.name === 'AbortError') { |
| // if the request was aborted, reject with this Error |
| abort = true; |
| reject(err); |
| } else { |
| // other errors, such as incorrect content-encoding |
| reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); |
| } |
| }); |
| |
| body.on('data', function (chunk) { |
| if (abort || chunk === null) { |
| return; |
| } |
| |
| if (_this4.size && accumBytes + chunk.length > _this4.size) { |
| abort = true; |
| reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); |
| return; |
| } |
| |
| accumBytes += chunk.length; |
| accum.push(chunk); |
| }); |
| |
| body.on('end', function () { |
| if (abort) { |
| return; |
| } |
| |
| clearTimeout(resTimeout); |
| |
| try { |
| resolve(Buffer.concat(accum, accumBytes)); |
| } catch (err) { |
| // handle streams that have accumulated too much data (issue #414) |
| reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); |
| } |
| }); |
| }); |
| } |
| |
| /** |
| * Detect buffer encoding and convert to target encoding |
| * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding |
| * |
| * @param Buffer buffer Incoming buffer |
| * @param String encoding Target encoding |
| * @return String |
| */ |
| function convertBody(buffer, headers) { |
| if (typeof convert !== 'function') { |
| throw new Error('The package `encoding` must be installed to use the textConverted() function'); |
| } |
| |
| const ct = headers.get('content-type'); |
| let charset = 'utf-8'; |
| let res, str; |
| |
| // header |
| if (ct) { |
| res = /charset=([^;]*)/i.exec(ct); |
| } |
| |
| // no charset in content type, peek at response body for at most 1024 bytes |
| str = buffer.slice(0, 1024).toString(); |
| |
| // html5 |
| if (!res && str) { |
| res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str); |
| } |
| |
| // html4 |
| if (!res && str) { |
| res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str); |
| |
| if (res) { |
| res = /charset=(.*)/i.exec(res.pop()); |
| } |
| } |
| |
| // xml |
| if (!res && str) { |
| res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); |
| } |
| |
| // found charset |
| if (res) { |
| charset = res.pop(); |
| |
| // prevent decode issues when sites use incorrect encoding |
| // ref: https://hsivonen.fi/encoding-menu/ |
| if (charset === 'gb2312' || charset === 'gbk') { |
| charset = 'gb18030'; |
| } |
| } |
| |
| // turn raw buffers into a single utf-8 buffer |
| return convert(buffer, 'UTF-8', charset).toString(); |
| } |
| |
| /** |
| * Detect a URLSearchParams object |
| * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 |
| * |
| * @param Object obj Object to detect by type or brand |
| * @return String |
| */ |
| function isURLSearchParams(obj) { |
| // Duck-typing as a necessary condition. |
| if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { |
| return false; |
| } |
| |
| // Brand-checking and more duck-typing as optional condition. |
| return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; |
| } |
| |
| /** |
| * Check if `obj` is a W3C `Blob` object (which `File` inherits from) |
| * @param {*} obj |
| * @return {boolean} |
| */ |
| function isBlob(obj) { |
| return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); |
| } |
| |
| /** |
| * Clone body given Res/Req instance |
| * |
| * @param Mixed instance Response or Request instance |
| * @return Mixed |
| */ |
| function clone(instance) { |
| let p1, p2; |
| let body = instance.body; |
| |
| // don't allow cloning a used body |
| if (instance.bodyUsed) { |
| throw new Error('cannot clone body after it is used'); |
| } |
| |
| // check that body is a stream and not form-data object |
| // note: we can't clone the form-data object without having it as a dependency |
| if (body instanceof Stream && typeof body.getBoundary !== 'function') { |
| // tee instance body |
| p1 = new PassThrough(); |
| p2 = new PassThrough(); |
| body.pipe(p1); |
| body.pipe(p2); |
| // set instance body to teed body and return the other teed body |
| instance[INTERNALS].body = p1; |
| body = p2; |
| } |
| |
| return body; |
| } |
| |
| /** |
| * Performs the operation "extract a `Content-Type` value from |object|" as |
| * specified in the specification: |
| * https://fetch.spec.whatwg.org/#concept-bodyinit-extract |
| * |
| * This function assumes that instance.body is present. |
| * |
| * @param Mixed instance Any options.body input |
| */ |
| function extractContentType(body) { |
| if (body === null) { |
| // body is null |
| return null; |
| } else if (typeof body === 'string') { |
| // body is string |
| return 'text/plain;charset=UTF-8'; |
| } else if (isURLSearchParams(body)) { |
| // body is a URLSearchParams |
| return 'application/x-www-form-urlencoded;charset=UTF-8'; |
| } else if (isBlob(body)) { |
| // body is blob |
| return body.type || null; |
| } else if (Buffer.isBuffer(body)) { |
| // body is buffer |
| return null; |
| } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { |
| // body is ArrayBuffer |
| return null; |
| } else if (ArrayBuffer.isView(body)) { |
| // body is ArrayBufferView |
| return null; |
| } else if (typeof body.getBoundary === 'function') { |
| // detect form data input from form-data module |
| return `multipart/form-data;boundary=${body.getBoundary()}`; |
| } else if (body instanceof Stream) { |
| // body is stream |
| // can't really do much about this |
| return null; |
| } else { |
| // Body constructor defaults other things to string |
| return 'text/plain;charset=UTF-8'; |
| } |
| } |
| |
| /** |
| * The Fetch Standard treats this as if "total bytes" is a property on the body. |
| * For us, we have to explicitly get it with a function. |
| * |
| * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes |
| * |
| * @param Body instance Instance of Body |
| * @return Number? Number of bytes, or null if not possible |
| */ |
| function getTotalBytes(instance) { |
| const body = instance.body; |
| |
| |
| if (body === null) { |
| // body is null |
| return 0; |
| } else if (isBlob(body)) { |
| return body.size; |
| } else if (Buffer.isBuffer(body)) { |
| // body is buffer |
| return body.length; |
| } else if (body && typeof body.getLengthSync === 'function') { |
| // detect form data input from form-data module |
| if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x |
| body.hasKnownLength && body.hasKnownLength()) { |
| // 2.x |
| return body.getLengthSync(); |
| } |
| return null; |
| } else { |
| // body is stream |
| return null; |
| } |
| } |
| |
| /** |
| * Write a Body to a Node.js WritableStream (e.g. http.Request) object. |
| * |
| * @param Body instance Instance of Body |
| * @return Void |
| */ |
| function writeToStream(dest, instance) { |
| const body = instance.body; |
| |
| |
| if (body === null) { |
| // body is null |
| dest.end(); |
| } else if (isBlob(body)) { |
| body.stream().pipe(dest); |
| } else if (Buffer.isBuffer(body)) { |
| // body is buffer |
| dest.write(body); |
| dest.end(); |
| } else { |
| // body is stream |
| body.pipe(dest); |
| } |
| } |
| |
// expose Promise (swappable so consumers can inject their own implementation)
Body.Promise = global.Promise;

/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

// Characters NOT allowed in a header name / header value respectively;
// used by validateName/validateValue and createHeadersLenient below.
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
| |
// Throw if `name` (after string coercion) is empty or contains characters
// not allowed in an HTTP header name.
function validateName(name) {
	name = `${name}`;
	const invalid = invalidTokenRegex.test(name) || name === '';
	if (invalid) {
		throw new TypeError(`${name} is not a legal HTTP header name`);
	}
}
| |
// Throw if `value` (after string coercion) contains characters not allowed
// in an HTTP header value.
function validateValue(value) {
	value = `${value}`;
	if (!invalidHeaderCharRegex.test(value)) {
		return;
	}
	throw new TypeError(`${value} is not a legal HTTP header value`);
}
| |
| /** |
| * Find the key in the map object given a header name. |
| * |
| * Returns undefined if not found. |
| * |
| * @param String name Header name |
| * @return String|Undefined |
| */ |
| function find(map, name) { |
| name = name.toLowerCase(); |
| for (const key in map) { |
| if (key.toLowerCase() === name) { |
| return key; |
| } |
| } |
| return undefined; |
| } |
| |
// Symbol key hiding the backing store: a null-prototype object mapping
// header name (original casing) -> array of string values.
const MAP = Symbol('map');
class Headers {
	/**
	 * Headers class
	 *
	 * Accepts another Headers instance, an iterable of [name, value] pairs,
	 * or a plain record of names to values.
	 *
	 * @param Object headers Response headers
	 * @return Void
	 */
	constructor() {
		let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

		this[MAP] = Object.create(null);

		// Copy-construct from another Headers instance.
		if (init instanceof Headers) {
			const rawHeaders = init.raw();
			const headerNames = Object.keys(rawHeaders);

			for (const headerName of headerNames) {
				for (const value of rawHeaders[headerName]) {
					this.append(headerName, value);
				}
			}

			return;
		}

		// We don't worry about converting prop to ByteString here as append()
		// will handle it.
		if (init == null) ; else if (typeof init === 'object') {
			const method = init[Symbol.iterator];
			if (method != null) {
				if (typeof method !== 'function') {
					throw new TypeError('Header pairs must be iterable');
				}

				// sequence<sequence<ByteString>>
				// Note: per spec we have to first exhaust the lists then process them
				const pairs = [];
				for (const pair of init) {
					if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
						throw new TypeError('Each header pair must be iterable');
					}
					pairs.push(Array.from(pair));
				}

				for (const pair of pairs) {
					if (pair.length !== 2) {
						throw new TypeError('Each header pair must be a name/value tuple');
					}
					this.append(pair[0], pair[1]);
				}
			} else {
				// record<ByteString, ByteString>
				for (const key of Object.keys(init)) {
					const value = init[key];
					this.append(key, value);
				}
			}
		} else {
			throw new TypeError('Provided initializer must be an object');
		}
	}

	/**
	 * Return combined header value given name
	 *
	 * Multiple values for the same name are joined with ', '.
	 *
	 * @param String name Header name
	 * @return Mixed Combined string value, or null when the header is absent
	 */
	get(name) {
		name = `${name}`;
		validateName(name);
		const key = find(this[MAP], name);
		if (key === undefined) {
			return null;
		}

		return this[MAP][key].join(', ');
	}

	/**
	 * Iterate over all headers
	 *
	 * The header list is re-snapshotted after each callback, so mutations
	 * made by the callback are observed by the iteration.
	 *
	 * @param Function callback Executed for each item with parameters (value, name, thisArg)
	 * @param Boolean thisArg `this` context for callback function
	 * @return Void
	 */
	forEach(callback) {
		let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

		let pairs = getHeaders(this);
		let i = 0;
		while (i < pairs.length) {
			var _pairs$i = pairs[i];
			const name = _pairs$i[0],
			      value = _pairs$i[1];

			callback.call(thisArg, value, name, this);
			pairs = getHeaders(this);
			i++;
		}
	}

	/**
	 * Overwrite header values given name
	 *
	 * Reuses the stored key's original casing when the header already exists.
	 *
	 * @param String name Header name
	 * @param String value Header value
	 * @return Void
	 */
	set(name, value) {
		name = `${name}`;
		value = `${value}`;
		validateName(name);
		validateValue(value);
		const key = find(this[MAP], name);
		this[MAP][key !== undefined ? key : name] = [value];
	}

	/**
	 * Append a value onto existing header
	 *
	 * @param String name Header name
	 * @param String value Header value
	 * @return Void
	 */
	append(name, value) {
		name = `${name}`;
		value = `${value}`;
		validateName(name);
		validateValue(value);
		const key = find(this[MAP], name);
		if (key !== undefined) {
			this[MAP][key].push(value);
		} else {
			this[MAP][name] = [value];
		}
	}

	/**
	 * Check for header name existence
	 *
	 * @param String name Header name
	 * @return Boolean
	 */
	has(name) {
		name = `${name}`;
		validateName(name);
		return find(this[MAP], name) !== undefined;
	}

	/**
	 * Delete all header values given name
	 *
	 * @param String name Header name
	 * @return Void
	 */
	delete(name) {
		name = `${name}`;
		validateName(name);
		const key = find(this[MAP], name);
		if (key !== undefined) {
			delete this[MAP][key];
		}
	}

	/**
	 * Return raw headers (non-spec api)
	 *
	 * Note: returns the live backing store, not a copy.
	 *
	 * @return Object
	 */
	raw() {
		return this[MAP];
	}

	/**
	 * Get an iterator on keys.
	 *
	 * @return Iterator
	 */
	keys() {
		return createHeadersIterator(this, 'key');
	}

	/**
	 * Get an iterator on values.
	 *
	 * @return Iterator
	 */
	values() {
		return createHeadersIterator(this, 'value');
	}

	/**
	 * Get an iterator on entries.
	 *
	 * This is the default iterator of the Headers object.
	 *
	 * @return Iterator
	 */
	[Symbol.iterator]() {
		return createHeadersIterator(this, 'key+value');
	}
}
// Spec alias: entries() is the default iterator.
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

// Make Object.prototype.toString.call(headers) report '[object Headers]'.
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
	value: 'Headers',
	writable: false,
	enumerable: false,
	configurable: true
});

// Browser-parity: these Headers methods are enumerable.
Object.defineProperties(Headers.prototype, {
	get: { enumerable: true },
	forEach: { enumerable: true },
	set: { enumerable: true },
	append: { enumerable: true },
	has: { enumerable: true },
	delete: { enumerable: true },
	keys: { enumerable: true },
	values: { enumerable: true },
	entries: { enumerable: true }
});
| |
// Produce a sorted snapshot of the headers as lowercased keys, joined
// values, or [key, value] pairs, depending on `kind`.
function getHeaders(headers, kind = 'key+value') {
	const keys = Object.keys(headers[MAP]).sort();

	let project;
	if (kind === 'key') {
		project = (k) => k.toLowerCase();
	} else if (kind === 'value') {
		project = (k) => headers[MAP][k].join(', ');
	} else {
		project = (k) => [k.toLowerCase(), headers[MAP][k].join(', ')];
	}
	return keys.map(project);
}
| |
// Symbol key hiding per-iterator state (target, kind, index).
const INTERNAL = Symbol('internal');

// Build a HeadersIterator over `target`, yielding items of the given kind.
function createHeadersIterator(target, kind) {
	const iterator = Object.create(HeadersIteratorPrototype);
	iterator[INTERNAL] = { target, kind, index: 0 };
	return iterator;
}
| |
// Shared prototype for headers iterators; inherits from the built-in
// %IteratorPrototype% so instances work with for..of and spread.
const HeadersIteratorPrototype = Object.setPrototypeOf({
	next() {
		// istanbul ignore if
		if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
			throw new TypeError('Value of `this` is not a HeadersIterator');
		}

		var _INTERNAL = this[INTERNAL];
		const target = _INTERNAL.target,
		      kind = _INTERNAL.kind,
		      index = _INTERNAL.index;

		// Headers are re-snapshotted on every call, so mutation during
		// iteration is reflected in subsequent results.
		const values = getHeaders(target, kind);
		const len = values.length;
		if (index >= len) {
			return {
				value: undefined,
				done: true
			};
		}

		this[INTERNAL].index = index + 1;

		return {
			value: values[index],
			done: false
		};
	}
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));

// Brand the iterator for Object.prototype.toString.
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
	value: 'HeadersIterator',
	writable: false,
	enumerable: false,
	configurable: true
});
| |
| /** |
| * Export the Headers object in a form that Node.js can consume. |
| * |
| * @param Headers headers |
| * @return Object |
| */ |
| function exportNodeCompatibleHeaders(headers) { |
| const obj = Object.assign({ __proto__: null }, headers[MAP]); |
| |
| // http.request() only supports string as Host header. This hack makes |
| // specifying custom Host header possible. |
| const hostHeaderKey = find(headers[MAP], 'Host'); |
| if (hostHeaderKey !== undefined) { |
| obj[hostHeaderKey] = obj[hostHeaderKey][0]; |
| } |
| |
| return obj; |
| } |
| |
| /** |
| * Create a Headers object from an object of headers, ignoring those that do |
| * not conform to HTTP grammar productions. |
| * |
| * @param Object obj Object of headers |
| * @return Headers |
| */ |
| function createHeadersLenient(obj) { |
| const headers = new Headers(); |
| for (const name of Object.keys(obj)) { |
| if (invalidTokenRegex.test(name)) { |
| continue; |
| } |
| if (Array.isArray(obj[name])) { |
| for (const val of obj[name]) { |
| if (invalidHeaderCharRegex.test(val)) { |
| continue; |
| } |
| if (headers[MAP][name] === undefined) { |
| headers[MAP][name] = [val]; |
| } else { |
| headers[MAP][name].push(val); |
| } |
| } |
| } else if (!invalidHeaderCharRegex.test(obj[name])) { |
| headers[MAP][name] = [obj[name]]; |
| } |
| } |
| return headers; |
| } |
| |
// Symbol key hiding per-instance Response state.
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
| |
| /** |
| * Response class |
| * |
| * @param Stream body Readable stream |
| * @param Object opts Response options |
| * @return Void |
| */ |
| class Response { |
| constructor() { |
| let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; |
| let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; |
| |
| Body.call(this, body, opts); |
| |
| const status = opts.status || 200; |
| const headers = new Headers(opts.headers); |
| |
| if (body != null && !headers.has('Content-Type')) { |
| const contentType = extractContentType(body); |
| if (contentType) { |
| headers.append('Content-Type', contentType); |
| } |
| } |
| |
| this[INTERNALS$1] = { |
| url: opts.url, |
| status, |
| statusText: opts.statusText || STATUS_CODES[status], |
| headers, |
| counter: opts.counter |
| }; |
| } |
| |
| get url() { |
| return this[INTERNALS$1].url || ''; |
| } |
| |
| get status() { |
| return this[INTERNALS$1].status; |
| } |
| |
| /** |
| * Convenience property representing if the request ended normally |
| */ |
| get ok() { |
| return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; |
| } |
| |
| get redirected() { |
| return this[INTERNALS$1].counter > 0; |
| } |
| |
| get statusText() { |
| return this[INTERNALS$1].statusText; |
| } |
| |
| get headers() { |
| return this[INTERNALS$1].headers; |
| } |
| |
| /** |
| * Clone this response |
| * |
| * @return Response |
| */ |
| clone() { |
| return new Response(clone(this), { |
| url: this.url, |
| status: this.status, |
| statusText: this.statusText, |
| headers: this.headers, |
| ok: this.ok, |
| redirected: this.redirected |
| }); |
| } |
| } |
| |
// Give Response the Body read methods (json, text, buffer, ...).
Body.mixIn(Response.prototype);

// Browser-parity: response accessors are enumerable.
Object.defineProperties(Response.prototype, {
	url: { enumerable: true },
	status: { enumerable: true },
	ok: { enumerable: true },
	redirected: { enumerable: true },
	statusText: { enumerable: true },
	headers: { enumerable: true },
	clone: { enumerable: true }
});

// Make Object.prototype.toString.call(res) report '[object Response]'.
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
	value: 'Response',
	writable: false,
	enumerable: false,
	configurable: true
});
| |
// Private per-instance state for Request, keyed by a non-enumerable Symbol.
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// Readable#destroy is only available on newer Node versions; needed to
// cancel a streamed request body on abort.
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
| |
| /** |
| * Check if a value is an instance of Request. |
| * |
| * @param Mixed input |
| * @return Boolean |
| */ |
| function isRequest(input) { |
| return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; |
| } |
| |
// Duck-type check for AbortSignal: any object whose direct prototype's
// constructor is named "AbortSignal" counts (works across realms/polyfills).
function isAbortSignal(signal) {
  if (!signal || typeof signal !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(signal);
  return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
| |
| /** |
| * Request class |
| * |
| * @param Mixed input Url or Request instance |
| * @param Object init Custom options |
| * @return Void |
| */ |
| class Request { |
| constructor(input) { |
| let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; |
| |
| let parsedURL; |
| |
| // normalize input |
| if (!isRequest(input)) { |
| if (input && input.href) { |
| // in order to support Node.js' Url objects; though WHATWG's URL objects |
| // will fall into this branch also (since their `toString()` will return |
| // `href` property anyway) |
| parsedURL = parse_url(input.href); |
| } else { |
| // coerce input to a string before attempting to parse |
| parsedURL = parse_url(`${input}`); |
| } |
| input = {}; |
| } else { |
| parsedURL = parse_url(input.url); |
| } |
| |
| let method = init.method || input.method || 'GET'; |
| method = method.toUpperCase(); |
| |
| if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { |
| throw new TypeError('Request with GET/HEAD method cannot have body'); |
| } |
| |
| let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; |
| |
| Body.call(this, inputBody, { |
| timeout: init.timeout || input.timeout || 0, |
| size: init.size || input.size || 0 |
| }); |
| |
| const headers = new Headers(init.headers || input.headers || {}); |
| |
| if (inputBody != null && !headers.has('Content-Type')) { |
| const contentType = extractContentType(inputBody); |
| if (contentType) { |
| headers.append('Content-Type', contentType); |
| } |
| } |
| |
| let signal = isRequest(input) ? input.signal : null; |
| if ('signal' in init) signal = init.signal; |
| |
| if (signal != null && !isAbortSignal(signal)) { |
| throw new TypeError('Expected signal to be an instanceof AbortSignal'); |
| } |
| |
| this[INTERNALS$2] = { |
| method, |
| redirect: init.redirect || input.redirect || 'follow', |
| headers, |
| parsedURL, |
| signal |
| }; |
| |
| // node-fetch-only options |
| this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; |
| this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; |
| this.counter = init.counter || input.counter || 0; |
| this.agent = init.agent || input.agent; |
| } |
| |
| get method() { |
| return this[INTERNALS$2].method; |
| } |
| |
| get url() { |
| return format_url(this[INTERNALS$2].parsedURL); |
| } |
| |
| get headers() { |
| return this[INTERNALS$2].headers; |
| } |
| |
| get redirect() { |
| return this[INTERNALS$2].redirect; |
| } |
| |
| get signal() { |
| return this[INTERNALS$2].signal; |
| } |
| |
| /** |
| * Clone this request |
| * |
| * @return Request |
| */ |
| clone() { |
| return new Request(this); |
| } |
| } |
| |
// Mix Body's consuming methods (json, text, buffer, ...) into Request.
Body.mixIn(Request.prototype);

// Brand the class so Object.prototype.toString reports [object Request].
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
  value: 'Request',
  writable: false,
  enumerable: false,
  configurable: true
});

// Make the public accessors enumerable, mirroring the WHATWG Request interface.
Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true }
});
| |
| /** |
| * Convert a Request to Node.js http request options. |
| * |
| * @param Request A Request instance |
| * @return Object The options object to be passed to http.request |
| */ |
| function getNodeRequestOptions(request) { |
| const parsedURL = request[INTERNALS$2].parsedURL; |
| const headers = new Headers(request[INTERNALS$2].headers); |
| |
| // fetch step 1.3 |
| if (!headers.has('Accept')) { |
| headers.set('Accept', '*/*'); |
| } |
| |
| // Basic fetch |
| if (!parsedURL.protocol || !parsedURL.hostname) { |
| throw new TypeError('Only absolute URLs are supported'); |
| } |
| |
| if (!/^https?:$/.test(parsedURL.protocol)) { |
| throw new TypeError('Only HTTP(S) protocols are supported'); |
| } |
| |
| if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { |
| throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); |
| } |
| |
| // HTTP-network-or-cache fetch steps 2.4-2.7 |
| let contentLengthValue = null; |
| if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { |
| contentLengthValue = '0'; |
| } |
| if (request.body != null) { |
| const totalBytes = getTotalBytes(request); |
| if (typeof totalBytes === 'number') { |
| contentLengthValue = String(totalBytes); |
| } |
| } |
| if (contentLengthValue) { |
| headers.set('Content-Length', contentLengthValue); |
| } |
| |
| // HTTP-network-or-cache fetch step 2.11 |
| if (!headers.has('User-Agent')) { |
| headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); |
| } |
| |
| // HTTP-network-or-cache fetch step 2.15 |
| if (request.compress && !headers.has('Accept-Encoding')) { |
| headers.set('Accept-Encoding', 'gzip,deflate'); |
| } |
| |
| let agent = request.agent; |
| if (typeof agent === 'function') { |
| agent = agent(parsedURL); |
| } |
| |
| if (!headers.has('Connection') && !agent) { |
| headers.set('Connection', 'close'); |
| } |
| |
| // HTTP-network fetch step 4.2 |
| // chunked encoding is handled by Node.js |
| |
| return Object.assign({}, parsedURL, { |
| method: request.method, |
| headers: exportNodeCompatibleHeaders(headers), |
| agent |
| }); |
| } |
| |
| /** |
| * abort-error.js |
| * |
| * AbortError interface for cancelled requests |
| */ |
| |
| /** |
| * Create AbortError instance |
| * |
| * @param String message Error message for human |
| * @return AbortError |
| */ |
| function AbortError(message) { |
| Error.call(this, message); |
| |
| this.type = 'aborted'; |
| this.message = message; |
| |
| // hide custom error implementation details from end-users |
| Error.captureStackTrace(this, this.constructor); |
| } |
| |
| AbortError.prototype = Object.create(Error.prototype); |
| AbortError.prototype.constructor = AbortError; |
| AbortError.prototype.name = 'AbortError'; |
| |
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;

/**
 * Fetch function
 *
 * Resolves with a Response once headers are received; rejects with
 * FetchError on network/protocol problems and AbortError on user abort.
 *
 * @param Mixed url Absolute url or Request instance
 * @param Object opts Fetch options
 * @return Promise
 */
function fetch(url, opts) {

  // allow custom promise
  if (!fetch.Promise) {
    throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
  }

  Body.Promise = fetch.Promise;

  // wrap http.request into fetch
  return new fetch.Promise(function (resolve, reject) {
    // build request object
    const request = new Request(url, opts);
    const options = getNodeRequestOptions(request);

    const send = (options.protocol === 'https:' ? https : http).request;
    const signal = request.signal;

    let response = null;

    // Reject the pending promise and propagate the abort error into both
    // the request body stream and (if already created) the response body.
    const abort = function abort() {
      let error = new AbortError('The user aborted a request.');
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) return;
      response.body.emit('error', error);
    };

    // The signal may already be aborted before the request is even sent.
    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = function abortAndFinalize() {
      abort();
      finalize();
    };

    // send request
    const req = send(options);
    let reqTimeout;

    if (signal) {
      signal.addEventListener('abort', abortAndFinalize);
    }

    // Tear down the in-flight request and release listener/timer resources.
    function finalize() {
      req.abort();
      if (signal) signal.removeEventListener('abort', abortAndFinalize);
      clearTimeout(reqTimeout);
    }

    // Arm the network timeout only once a socket has been assigned.
    if (request.timeout) {
      req.once('socket', function (socket) {
        reqTimeout = setTimeout(function () {
          reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
          finalize();
        }, request.timeout);
      });
    }

    req.on('error', function (err) {
      reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
      finalize();
    });

    req.on('response', function (res) {
      clearTimeout(reqTimeout);

      const headers = createHeadersLenient(res.headers);

      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location');

        // HTTP fetch step 5.3
        const locationURL = location === null ? null : resolve_url(request.url, location);

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
            finalize();
            return;
          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL);
              } catch (err) {
                // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
                reject(err);
              }
            }
            break;
          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout
            };

            // HTTP-redirect fetch step 9
            if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
              reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11
            if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
              requestOpts.method = 'GET';
              requestOpts.body = undefined;
              requestOpts.headers.delete('content-length');
            }

            // HTTP-redirect fetch step 15
            resolve(fetch(new Request(locationURL, requestOpts)));
            finalize();
            return;
        }
      }

      // prepare response
      res.once('end', function () {
        if (signal) signal.removeEventListener('abort', abortAndFinalize);
      });
      let body = res.pipe(new PassThrough$1());

      const response_options = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding');

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH
      };

      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        body = body.pipe(zlib.createGunzip(zlibOptions));
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = res.pipe(new PassThrough$1());
        raw.once('data', function (chunk) {
          // see http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0F) === 0x08) {
            body = body.pipe(zlib.createInflate());
          } else {
            body = body.pipe(zlib.createInflateRaw());
          }
          response = new Response(body, response_options);
          resolve(response);
        });
        return;
      }

      // for br
      if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
        body = body.pipe(zlib.createBrotliDecompress());
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // otherwise, use response as-is
      response = new Response(body, response_options);
      resolve(response);
    });

    writeToStream(req, request);
  });
}
| /** |
| * Redirect code matching |
| * |
| * @param Number code Status code |
| * @return Boolean |
| */ |
| fetch.isRedirect = function (code) { |
| return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; |
| }; |
| |
| // expose Promise |
| fetch.Promise = global.Promise; |
| |
// Export fetch itself as the module, while also providing __esModule /
// default / named exports so both CJS and transpiled ESM consumers work.
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
| |
| |
| /***/ }), |
| |
| /***/ 370: |
| /***/ (function(module) { |
| |
| module.exports = deprecate |
| |
// Messages already warned about, so each deprecation is printed only once.
// A Set avoids false positives for messages that collide with
// Object.prototype keys (e.g. "constructor", "toString"), which the previous
// plain-object lookup treated as already-logged and silently swallowed.
const loggedMessages = new Set()

/**
 * Print a deprecation warning once per unique message.
 */
function deprecate (message) {
  if (loggedMessages.has(message)) {
    return
  }

  console.warn(`DEPRECATED (@octokit/rest): ${message}`)
  loggedMessages.add(message)
}
| |
| |
| /***/ }), |
| |
| /***/ 385: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } |
| |
| var isPlainObject = _interopDefault(__webpack_require__(626)); |
| var universalUserAgent = __webpack_require__(796); |
| |
// Return a shallow copy of `object` with every key lowercased; falsy input
// yields an empty object.
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }

  const result = {};
  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }
  return result;
}
| |
// Recursively merge `options` onto a shallow copy of `defaults`.
// Nested plain objects are merged key-by-key; everything else (arrays,
// scalars, or keys absent from defaults) is overwritten wholesale.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);

  for (const key of Object.keys(options)) {
    const value = options[key];
    if (isPlainObject(value) && key in defaults) {
      result[key] = mergeDeep(defaults[key], value);
    } else {
      result[key] = value;
    }
  }

  return result;
}
| |
// Merge a route (either "METHOD /path" shorthand or an options object)
// with endpoint defaults, giving special array-union treatment to
// mediaType.previews.
function merge(defaults, route, options) {
  if (typeof route === "string") {
    const [method, url] = route.split(" ");
    // A bare "/path" string has no method part; truthiness matches the
    // original behavior for an empty url segment.
    const routeOptions = url ? { method, url } : { url: method };
    options = Object.assign(routeOptions, options);
  } else {
    options = Object.assign({}, route);
  }

  // lowercase header names before merging with defaults to avoid duplicates
  options.headers = lowercaseKeys(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options);

  // mediaType.previews arrays are merged, instead of overwritten
  if (defaults && defaults.mediaType.previews.length) {
    const inherited = defaults.mediaType.previews.filter(
      preview => !mergedOptions.mediaType.previews.includes(preview)
    );
    mergedOptions.mediaType.previews = inherited.concat(mergedOptions.mediaType.previews);
  }

  // Preview names are stored without the "-preview" suffix.
  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(
    preview => preview.replace(/-preview/, "")
  );
  return mergedOptions;
}
| |
// Append `parameters` to `url` as a query string, using "&" when the URL
// already has a query. The "q" search parameter keeps its "+" separators.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }

  const separator = /\?/.test(url) ? "&" : "?";
  const query = names
    .map(name => {
      if (name === "q") {
        // "+" in a search query separates terms and must survive encoding.
        return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
      }
      return `${name}=${encodeURIComponent(parameters[name])}`;
    })
    .join("&");

  return url + separator + query;
}
| |
const urlVariableRegex = /\{[^}]+\}/g;

// Strip the surrounding braces/operator characters from a template
// expression and split it into its comma-separated variable names.
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

// Collect every variable name used in an RFC 6570 style URL template.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  if (!matches) {
    return [];
  }

  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}
| |
// Return a shallow copy of `object` without the keys listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
| |
| // Based on https://github.com/bramstein/url-template, licensed under BSD |
| // TODO: create separate package. |
| // |
| // Copyright (c) 2012-2014, Bram Stein |
| // All rights reserved. |
| // Redistribution and use in source and binary forms, with or without |
| // modification, are permitted provided that the following conditions |
| // are met: |
| // 1. Redistributions of source code must retain the above copyright |
| // notice, this list of conditions and the following disclaimer. |
| // 2. Redistributions in binary form must reproduce the above copyright |
| // notice, this list of conditions and the following disclaimer in the |
| // documentation and/or other materials provided with the distribution. |
| // 3. The name of the author may not be used to endorse or promote products |
| // derived from this software without specific prior written permission. |
| // THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED |
| // WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
| // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
| // EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, |
| // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, |
| // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
| // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING |
| // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, |
| // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| |
| /* istanbul ignore file */ |
// Percent-encode a string while leaving already-encoded %XX escapes and the
// reserved "[" / "]" characters intact (RFC 6570 reserved expansion).
function encodeReserved(str) {
  const out = [];
  for (const part of str.split(/(%[0-9A-Fa-f]{2})/g)) {
    if (/%[0-9A-Fa-f]/.test(part)) {
      // Existing escape sequence: pass through untouched.
      out.push(part);
    } else {
      out.push(encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"));
    }
  }
  return out.join("");
}
| |
// Strict percent-encoding: like encodeURIComponent, but also escapes the
// characters ! ' ( ) * which RFC 3986 marks as reserved.
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, c =>
    `%${c.charCodeAt(0).toString(16).toUpperCase()}`
  );
}
| |
// Encode one template value; "+" and "#" operators allow reserved characters.
// When a key is given the result is rendered as "key=value".
function encodeValue(operator, value, key) {
  const encoded = operator === "+" || operator === "#"
    ? encodeReserved(value)
    : encodeUnreserved(value);

  return key ? `${encodeUnreserved(key)}=${encoded}` : encoded;
}
| |
// True for any value except null and undefined (loose `!= null` matches
// exactly those two).
function isDefined(value) {
  return value != null;
}
| |
// Operators whose expansions are rendered as key=value pairs.
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
| |
// Expand a single RFC 6570 variable into its encoded string fragments.
// `operator` is the template operator ("", "+", "#", ".", "/", ";", "?", "&"),
// `key` the variable name, and `modifier` either a ":n" prefix length or
// "*" for explode.
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];

  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();

      // ":n" prefix modifier truncates the scalar value.
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }

      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      if (modifier === "*") {
        // Explode: each array item / object pair becomes its own fragment.
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // No explode: join items/pairs with commas into a single fragment.
        const tmp = [];

        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }

        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // Null/undefined/empty handling depends on the operator.
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }

  return result;
}
| |
// Wrap a URL template in an object exposing an `expand(context)` method.
function parseUrl(template) {
  return {
    expand: context => expand(template, context)
  };
}
| |
// Expand an RFC 6570 URL template against a context object of values.
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];

      // A leading operator character selects separator and encoding rules.
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }

      expression.split(/,/g).forEach(function (variable) {
        // Split a variable spec into name, ":n" prefix modifier, or "*" explode.
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });

      if (operator && operator !== "+") {
        var separator = ",";

        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }

        // Prefix with the operator itself (e.g. "?", ";") when non-empty.
        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      // Literal text outside expressions is percent-encoded as needed.
      return encodeReserved(literal);
    }
  });
}
| |
/**
 * Turn fully-merged endpoint options into a concrete request description
 * of the shape { method, url, headers, [body], [request] }.
 */
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);

  // Relative URLs are resolved against the configured base URL.
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }

  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequset = /application\/octet-stream/i.test(headers.accept);

  if (!isBinaryRequset) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }

    if (options.mediaType.previews.length) {
      // Merge previews already present in the accept header with the
      // requested ones, rendering each as a github preview media type.
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters


  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set


  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string


  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present


  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
| |
// Merge a route with the bound defaults, then parse into request options.
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
| |
// Build an endpoint function pre-bound to merged defaults, exposing
// DEFAULTS plus defaults/merge/parse helpers on the function itself.
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);

  endpoint.DEFAULTS = DEFAULTS;
  endpoint.defaults = withDefaults.bind(null, DEFAULTS);
  endpoint.merge = merge.bind(null, DEFAULTS);
  endpoint.parse = parse;

  return endpoint;
}
| |
const VERSION = "6.0.1";

// Default User-Agent advertises this package version and the runtime.
const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.

const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};

// Exported endpoint function pre-seeded with the GitHub API defaults above.
const endpoint = withDefaults(null, DEFAULTS);

exports.endpoint = endpoint;
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 386: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// A !!js/undefined node is written as an empty scalar, so any scalar
// resolves to this type.
function resolveJavascriptUndefined() {
  return true;
}

function constructJavascriptUndefined() {
  /*eslint-disable no-undefined*/
  return undefined;
}

// The serialized form of `undefined` is an empty string.
function representJavascriptUndefined() {
  return '';
}

function isUndefined(object) {
  return object === undefined;
}
| |
// YAML type for JavaScript `undefined` (tag !!js/undefined), wired to the
// resolver/constructor/representer helpers above.
module.exports = new Type('tag:yaml.org,2002:js/undefined', {
  kind: 'scalar',
  resolve: resolveJavascriptUndefined,
  construct: constructJavascriptUndefined,
  predicate: isUndefined,
  represent: representJavascriptUndefined
});
| |
| |
| /***/ }), |
| |
| /***/ 389: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const fs = __webpack_require__(747); |
| const shebangCommand = __webpack_require__(866); |
| |
/**
 * Read the shebang line (if any) from an executable file.
 *
 * Best-effort: any filesystem error leaves the buffer zero-filled and the
 * function simply returns whatever shebangCommand extracts (null when the
 * content is not a shebang).
 *
 * @param {string} command Path of the file to inspect
 * @returns {string|null} The shebang command, or null if none was found
 */
function readShebang(command) {
  // Read the first 150 bytes from the file
  const size = 150;
  let buffer;

  if (Buffer.alloc) {
    // Node.js v4.5+ / v5.10+
    buffer = Buffer.alloc(size);
  } else {
    // Old Node.js API
    buffer = new Buffer(size);
    buffer.fill(0); // zero-fill
  }

  let fd;

  try {
    fd = fs.openSync(command, 'r');
    fs.readSync(fd, buffer, 0, size, 0);
  } catch (e) { /* Empty */ } finally {
    // Always release the descriptor: the previous version leaked the fd
    // when readSync threw after a successful open.
    if (fd !== undefined) {
      try {
        fs.closeSync(fd);
      } catch (e) { /* Empty */ }
    }
  }

  // Attempt to extract shebang (null is returned if not a shebang)
  return shebangCommand(buffer.toString());
}
| |
| module.exports = readShebang; |
| |
| |
| /***/ }), |
| |
| /***/ 402: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = Octokit; |
| |
| const { request } = __webpack_require__(753); |
| const Hook = __webpack_require__(523); |
| |
| const parseClientOptions = __webpack_require__(294); |
| |
/**
 * Build an Octokit API client: a hook collection, a logger, and a request
 * function bound to the parsed client options, then let each plugin extend it.
 */
function Octokit(plugins, options) {
  options = options || {};
  const hook = new Hook.Collection();

  // debug/info default to no-ops; warnings and errors stay visible unless
  // overridden via options.log.
  const defaultLog = {
    debug: () => {},
    info: () => {},
    warn: console.warn,
    error: console.error
  };
  const log = Object.assign(defaultLog, options.log);

  const api = {
    hook,
    log,
    request: request.defaults(parseClientOptions(options, log, hook))
  };

  for (const pluginFunction of plugins) {
    pluginFunction(api, options);
  }

  return api;
}
| |
| |
| /***/ }), |
| |
| /***/ 410: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.csvEscape = void 0; |
// Returns filename escaped for CSV
// Wraps file name into "..." only when it contains some potentially unsafe character
function csvEscape(value) {
  if (value === '')
    return value;
  // Only safe characters, matched against the WHOLE value.
  // The previous regex carried the /m (multiline) flag, which made ^/$ match
  // per line: a value containing a newline (e.g. "abc\n,x") tested as "safe"
  // and was returned unquoted, corrupting the CSV row. Without /m, any value
  // containing a newline fails this test and gets quoted below.
  if (/^[a-zA-Z0-9._+:@%/-]+$/.test(value)) {
    return value;
  }
  // https://tools.ietf.org/html/rfc4180
  // If double-quotes are used to enclose fields, then a double-quote
  // appearing inside a field must be escaped by preceding it with
  // another double quote
  return `"${value.replace(/"/g, '""')}"`;
}
| exports.csvEscape = csvEscape; |
| |
| |
| /***/ }), |
| |
| /***/ 413: |
| /***/ (function(module) { |
| |
| module.exports = require("stream"); |
| |
| /***/ }), |
| |
| /***/ 414: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| |
| var yaml = __webpack_require__(819); |
| |
| |
| module.exports = yaml; |
| |
| |
| /***/ }), |
| |
| /***/ 417: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var common = __webpack_require__(740); |
| var Type = __webpack_require__(945); |
| |
var YAML_FLOAT_PATTERN = new RegExp(
  // 2.5e4, 2.5 and integers
  '^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' +
  // .2e4, .2
  // special case, seems not from spec
  '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' +
  // 20:59
  '|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*' +
  // .inf
  '|[-+]?\\.(?:inf|Inf|INF)' +
  // .nan
  '|\\.(?:nan|NaN|NAN))$');

// Whether a scalar string can be parsed as a YAML float.
function resolveYamlFloat(data) {
  if (data === null) return false;

  // Quick hack to not allow numbers ending with `_` (e.g. "1_"), which the
  // pattern above would otherwise accept.
  if (data[data.length - 1] === '_') return false;

  return YAML_FLOAT_PATTERN.test(data);
}
| |
// Converts a resolved YAML float string into a JS number. Handles the special
// values .inf/.nan, sexagesimal (base 60) "h:m:s.frac" notation, and numeric
// literals with `_` separators.
function constructYamlFloat(data) {
  // Strip underscore separators ("1_000.5") and normalize case so the
  // special-value comparisons below only need lowercase.
  var value = data.replace(/_/g, '').toLowerCase();
  var sign = value[0] === '-' ? -1 : 1;

  if ('+-'.indexOf(value[0]) >= 0) {
    value = value.slice(1);
  }

  if (value === '.inf') {
    return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;
  }

  if (value === '.nan') {
    return NaN;
  }

  if (value.indexOf(':') >= 0) {
    // Sexagesimal float, e.g. "190:20:30.15" = 190*3600 + 20*60 + 30.15.
    // Note: parseFloat takes no radix argument - the original passed a
    // spurious `10`, which parseFloat silently ignored.
    var digits = [];
    value.split(':').forEach(function (v) {
      digits.unshift(parseFloat(v));
    });

    var result = 0.0;
    var base = 1;

    digits.forEach(function (d) {
      result += d * base;
      base *= 60;
    });

    return sign * result;
  }

  return sign * parseFloat(value);
}
| |
| |
// Matches JS scientific notation without a dot (e.g. "5e-100"), which YAML
// requires to be written with one ("5.e-100").
var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;

// Serializes a float for dumping. The special values NaN/±Infinity have three
// capitalization styles; any other number (or unknown style) falls through to
// the default decimal representation.
function representYamlFloat(object, style) {
  var special = null;

  if (isNaN(object)) {
    special = { lowercase: '.nan', uppercase: '.NAN', camelcase: '.NaN' };
  } else if (Number.POSITIVE_INFINITY === object) {
    special = { lowercase: '.inf', uppercase: '.INF', camelcase: '.Inf' };
  } else if (Number.NEGATIVE_INFINITY === object) {
    special = { lowercase: '-.inf', uppercase: '-.INF', camelcase: '-.Inf' };
  } else if (common.isNegativeZero(object)) {
    return '-0.0';
  }

  if (special && Object.prototype.hasOwnProperty.call(special, style)) {
    return special[style];
  }

  var res = object.toString(10);

  // Insert the dot YAML requires in dotless scientific notation.
  return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res;
}
| |
// Dump-time predicate: true for Number values that are not integers
// (plus negative zero, which would otherwise look like an integer).
function isFloat(object) {
  if (Object.prototype.toString.call(object) !== '[object Number]') {
    return false;
  }
  return object % 1 !== 0 || common.isNegativeZero(object);
}
| |
// Register the YAML 1.1 !!float type: resolve/construct are used when
// loading, predicate/represent when dumping.
module.exports = new Type('tag:yaml.org,2002:float', {
  kind: 'scalar',
  resolve: resolveYamlFloat,
  construct: constructYamlFloat,
  predicate: isFloat,
  represent: representYamlFloat,
  defaultStyle: 'lowercase'
});
| |
| |
| /***/ }), |
| |
| /***/ 427: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| // Older verions of Node.js might not have `util.getSystemErrorName()`. |
| // In that case, fall back to a deprecated internal. |
// Maps a negative libuv error code (e.g. -2) to its name (e.g. 'ENOENT').
const util = __webpack_require__(669);

let uv;

if (typeof util.getSystemErrorName === 'function') {
	// Modern Node: delegate directly to the public API.
	module.exports = util.getSystemErrorName;
} else {
	try {
		// Deprecated internal binding; may be unavailable in some environments.
		uv = process.binding('uv');

		if (typeof uv.errname !== 'function') {
			throw new TypeError('uv.errname is not a function');
		}
	} catch (err) {
		// With uv === null, errname() degrades to "Unknown system error N".
		console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
		uv = null;
	}

	module.exports = code => errname(uv, code);
}

// Used for testing the fallback behavior
module.exports.__test__ = errname;
| |
// Resolves `code` via the uv binding when available; otherwise produces a
// generic message. Only negative codes are valid system error codes.
function errname(uv, code) {
	if (uv) {
		return uv.errname(code);
	}

	// Written as !(code < 0) rather than code >= 0 so that non-numeric
	// input (e.g. NaN) is rejected as well.
	if (!(code < 0)) {
		throw new Error('err >= 0');
	}

	return `Unknown system error ${code}`;
}
| |
| |
| |
| /***/ }), |
| |
| /***/ 430: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = octokitValidate; |
| |
| const validate = __webpack_require__(348); |
| |
// Registers request validation as a "before" hook so every outgoing request
// is checked against the endpoint spec before it is sent.
function octokitValidate(octokit) {
  const validateRequest = validate.bind(null, octokit);
  octokit.hook.before("request", validateRequest);
}
| |
| |
| /***/ }), |
| |
| /***/ 431: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript interop helper: wraps a CommonJS module so it can be consumed as
// a namespace import. ES modules pass through untouched; everything else is
// shallow-copied with the original exposed on `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) {
        // Copy own enumerable properties only (same filter as the original
        // for-in + hasOwnProperty loop).
        Object.keys(mod).forEach(function (key) { result[key] = mod[key]; });
    }
    result["default"] = mod;
    return result;
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const os = __importStar(__webpack_require__(87)); |
| /** |
| * Commands |
| * |
| * Command Format: |
| * ::name key=value,key=value::message |
| * |
| * Examples: |
| * ::warning::This is the message |
| * ::set-env name=MY_VAR::some value |
| */ |
/**
 * Writes a workflow command to stdout in the `::name key=value::message`
 * format understood by the GitHub Actions runner.
 * @param command command name, e.g. 'warning' or 'set-env'
 * @param properties key/value pairs rendered after the command name
 * @param message payload written after the closing `::`
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
// Convenience wrapper for commands that take no properties.
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
// Represents one workflow command; toString() renders the wire format
// `::command key=value,key=value::message`.
class Command {
    constructor(command, properties, message) {
        // Never emit an empty command name.
        this.command = command || 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            // Render only truthy property values as escaped key=value pairs.
            const pairs = [];
            for (const key of Object.keys(this.properties)) {
                const val = this.properties[key];
                if (val) {
                    pairs.push(`${key}=${escapeProperty(val)}`);
                }
            }
            // Note: the space is emitted even when every value is falsy,
            // matching the runner's expected format.
            cmdStr += ' ' + pairs.join(',');
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
| /** |
| * Sanitizes an input into a string so it can be passed into issueCommand safely |
| * @param input input to sanitize into a string |
| */ |
| function toCommandValue(input) { |
| if (input === null || input === undefined) { |
| return ''; |
| } |
| else if (typeof input === 'string' || input instanceof String) { |
| return input; |
| } |
| return JSON.stringify(input); |
| } |
| exports.toCommandValue = toCommandValue; |
// Escapes a command message: "%" first (so existing escapes are not mangled),
// then CR/LF, because a workflow command must stay on a single line.
function escapeData(s) {
    const value = toCommandValue(s);
    return value
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
// Escapes a command property value: same as escapeData plus ":" and ",",
// which delimit the key=value list inside a command.
function escapeProperty(s) {
    const value = toCommandValue(s);
    return value
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
| //# sourceMappingURL=command.js.map |
| |
| /***/ }), |
| |
| /***/ 453: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var once = __webpack_require__(969) |
| var eos = __webpack_require__(9) |
| var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes |
| |
// Shared no-op used wherever an optional callback or prototype is absent.
function noop() {}

// True on 0.x-era Node versions, which need special fs-stream handling.
var ancient = /^v?\.0/.test(process.version);

function isFn(fn) {
  return typeof fn === 'function';
}

// fs streams on ancient Node must be closed via .close() to avoid fd leaks.
function isFS(stream) {
  if (!ancient) return false; // newer node versions do not treat fs specially
  if (!fs) return false; // browser build
  var isFsStream =
    stream instanceof (fs.ReadStream || noop) ||
    stream instanceof (fs.WriteStream || noop);
  return isFsStream && isFn(stream.close);
}

// http requests expose setHeader/abort; .abort() is the proper teardown.
function isRequest(stream) {
  return stream.setHeader && isFn(stream.abort);
}
| |
// Returns a destroy function for `stream`. `callback` (wrapped in once()) is
// invoked when the stream finishes per eos, or with an error when the
// returned destroy function fires before completion.
var destroyer = function (stream, reading, writing, callback) {
  callback = once(callback)

  var closed = false
  stream.on('close', function () {
    closed = true
  })

  // end-of-stream watches only the side(s) this pipeline cares about.
  eos(stream, {readable: reading, writable: writing}, function (err) {
    if (err) return callback(err)
    closed = true
    callback()
  })

  var destroyed = false
  return function (err) {
    // Already finished or already destroyed: nothing to do.
    if (closed) return
    if (destroyed) return
    destroyed = true

    if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
    if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want

    if (isFn(stream.destroy)) return stream.destroy()

    callback(err || new Error('stream was destroyed'))
  }
}
| |
// Invokes a zero-argument function; used with Array#forEach over destroyers.
function call(fn) {
  fn();
}

// Connects two streams and returns whatever from.pipe yields (the
// destination, for standard streams), enabling Array#reduce chaining.
function pipe(from, to) {
  return from.pipe(to);
}
| |
// pump(stream1, stream2, ..., [callback]) or pump([streams], [callback]):
// pipes the streams together and destroys all of them if one closes early or
// errors. The optional trailing callback receives the first error, if any.
function pump() {
  var streams = Array.prototype.slice.call(arguments);

  // A trailing function argument is the completion callback.
  var callback = noop;
  if (isFn(streams[streams.length - 1] || noop)) {
    callback = streams.pop() || noop;
  }

  if (Array.isArray(streams[0])) streams = streams[0];
  if (streams.length < 2) throw new Error('pump requires two streams per minimum');

  var error;
  var destroys = streams.map(function (stream, i) {
    var reading = i < streams.length - 1; // every stream but the last is read
    var writing = i > 0;                  // every stream but the first is written
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err;
      // Any error tears the whole pipeline down.
      if (err) destroys.forEach(call);
      // Wait for the final (write-side) stream before reporting completion.
      if (reading) return;
      destroys.forEach(call);
      callback(error);
    });
  });

  return streams.reduce(pipe);
}

module.exports = pump
| |
| |
| /***/ }), |
| |
| /***/ 457: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| /*eslint-disable max-len,no-use-before-define*/ |
| |
| var common = __webpack_require__(740); |
| var YAMLException = __webpack_require__(556); |
| var Mark = __webpack_require__(93); |
| var DEFAULT_SAFE_SCHEMA = __webpack_require__(723); |
| var DEFAULT_FULL_SCHEMA = __webpack_require__(910); |
| |
| |
var _hasOwnProperty = Object.prototype.hasOwnProperty;


// Parsing contexts: inside a flow collection ("[...]"/"{...}") or a block
// collection; several productions are legal only in specific contexts.
var CONTEXT_FLOW_IN = 1;
var CONTEXT_FLOW_OUT = 2;
var CONTEXT_BLOCK_IN = 3;
var CONTEXT_BLOCK_OUT = 4;


// Block scalar chomping modes: clip = single trailing newline (default),
// strip ("-") = no trailing newlines, keep ("+") = all trailing newlines.
var CHOMPING_CLIP = 1;
var CHOMPING_STRIP = 2;
var CHOMPING_KEEP = 3;


// Characters forbidden anywhere in a YAML stream, plus lone UTF-16 surrogates.
var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/;
// Line-break characters YAML 1.1 recognizes beyond LF/CR (NEL, LS, PS).
var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/;
var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/;
// Tag handles: "!", "!!" or a named handle such as "!foo!".
var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i;
var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i;
| |
| |
// Shorthand for Object.prototype.toString, e.g. '[object Array]'.
function _class(obj) { return Object.prototype.toString.call(obj); }

// Line break: LF or CR.
function is_EOL(c) {
  return c === 0x0A/* LF */ || c === 0x0D/* CR */;
}

// Inline whitespace: tab or space.
function is_WHITE_SPACE(c) {
  return c === 0x09/* Tab */ || c === 0x20/* Space */;
}

// Inline whitespace or line break.
function is_WS_OR_EOL(c) {
  return is_WHITE_SPACE(c) || is_EOL(c);
}

// Flow-collection syntax characters: , [ ] { }
function is_FLOW_INDICATOR(c) {
  switch (c) {
    case 0x2C/* , */:
    case 0x5B/* [ */:
    case 0x5D/* ] */:
    case 0x7B/* { */:
    case 0x7D/* } */:
      return true;
    default:
      return false;
  }
}
| |
// Converts a hex digit char code ('0'-'9', 'a'-'f', 'A'-'F') to its value,
// or -1 for anything else.
function fromHexCode(c) {
  if (c >= 0x30/* 0 */ && c <= 0x39/* 9 */) {
    return c - 0x30;
  }

  /*eslint-disable no-bitwise*/
  var lc = c | 0x20; // force ASCII lowercase

  if (lc >= 0x61/* a */ && lc <= 0x66/* f */) {
    return lc - 0x61 + 10;
  }

  return -1;
}

// Number of hex digits expected after \x, \u or \U escapes; 0 otherwise.
function escapedHexLen(c) {
  switch (c) {
    case 0x78/* x */: return 2;
    case 0x75/* u */: return 4;
    case 0x55/* U */: return 8;
    default: return 0;
  }
}

// Converts a decimal digit char code to its value, or -1 for anything else.
function fromDecimalCode(c) {
  return (c >= 0x30/* 0 */ && c <= 0x39/* 9 */) ? c - 0x30 : -1;
}
| |
// Maps a single-character escape (the char after "\") to its replacement,
// or '' when the character is not a simple escape.
function simpleEscapeSequence(c) {
  switch (c) {
    case 0x30/* 0 */:     return '\x00';
    case 0x61/* a */:     return '\x07';
    case 0x62/* b */:     return '\x08';
    case 0x74/* t */:     return '\x09';
    case 0x09/* Tab */:   return '\x09';
    case 0x6E/* n */:     return '\x0A';
    case 0x76/* v */:     return '\x0B';
    case 0x66/* f */:     return '\x0C';
    case 0x72/* r */:     return '\x0D';
    case 0x65/* e */:     return '\x1B';
    case 0x20/* Space */: return ' ';
    case 0x22/* " */:     return '\x22';
    case 0x2F/* / */:     return '/';
    case 0x5C/* \ */:     return '\x5C';
    case 0x4E/* N */:     return '\x85';
    case 0x5F/* _ */:     return '\xA0';
    case 0x4C/* L */:     return '\u2028';
    case 0x50/* P */:     return '\u2029';
    default:              return '';
  }
}

// Converts a code point to a JS string. The manual surrogate-pair math is
// kept (rather than String.fromCodePoint) so out-of-range inputs from \U
// escapes do not throw, matching the original behavior.
function charFromCodepoint(c) {
  if (c <= 0xFFFF) {
    return String.fromCharCode(c);
  }
  // Encode UTF-16 surrogate pair
  // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF
  var offset = c - 0x010000;
  return String.fromCharCode((offset >> 10) + 0xD800, (offset & 0x03FF) + 0xDC00);
}

// Precomputed per-byte lookup tables for fast escape handling.
var simpleEscapeCheck = new Array(256); // integer, for fast access
var simpleEscapeMap = new Array(256);
for (var i = 0; i < 256; i++) {
  simpleEscapeMap[i] = simpleEscapeSequence(i);
  simpleEscapeCheck[i] = simpleEscapeMap[i] ? 1 : 0;
}
| |
| |
// Mutable parser state for one load()/loadAll() run over `input`.
function State(input, options) {
  this.input = input;

  // User options (all optional).
  this.filename = options['filename'] || null;
  this.schema = options['schema'] || DEFAULT_FULL_SCHEMA;
  this.onWarning = options['onWarning'] || null;
  this.legacy = options['legacy'] || false;
  this.json = options['json'] || false;
  this.listener = options['listener'] || null;

  // Derived from the schema: implicit types tried on plain scalars, and the
  // tag -> type lookup table.
  this.implicitTypes = this.schema.compiledImplicit;
  this.typeMap = this.schema.compiledTypeMap;

  // Scan-position bookkeeping, updated as the input is consumed.
  this.length = input.length;
  this.position = 0;
  this.line = 0;
  this.lineStart = 0;
  this.lineIndent = 0;

  this.documents = [];

  /*
  this.version;
  this.checkLineBreaks;
  this.tagMap;
  this.anchorMap;
  this.tag;
  this.anchor;
  this.kind;
  this.result;*/

}
| |
| |
// Builds a YAMLException whose Mark points at the current parse position
// (column computed from position - lineStart).
function generateError(state, message) {
  return new YAMLException(
    message,
    new Mark(state.filename, state.input, state.position, state.line, (state.position - state.lineStart)));
}

// Raises a fatal parse error at the current position.
function throwError(state, message) {
  throw generateError(state, message);
}

// Reports a non-fatal problem through the user-supplied onWarning hook, if any.
function throwWarning(state, message) {
  if (state.onWarning) {
    state.onWarning.call(null, generateError(state, message));
  }
}
| |
| |
// Handlers for the YAML directives (%YAML, %TAG), keyed by directive name.
// Each receives the parser state, the directive name, and its arguments.
var directiveHandlers = {

  YAML: function handleYamlDirective(state, name, args) {

    var match, major, minor;

    // NOTE(review): state.version is presumably reset to null before each
    // document's directives are read (not visible here) - confirm in the
    // document-reading code; otherwise this would always fire.
    if (state.version !== null) {
      throwError(state, 'duplication of %YAML directive');
    }

    if (args.length !== 1) {
      throwError(state, 'YAML directive accepts exactly one argument');
    }

    match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]);

    if (match === null) {
      throwError(state, 'ill-formed argument of the YAML directive');
    }

    major = parseInt(match[1], 10);
    minor = parseInt(match[2], 10);

    // Only YAML 1.x documents are supported.
    if (major !== 1) {
      throwError(state, 'unacceptable YAML version of the document');
    }

    state.version = args[0];
    // Pre-1.2 YAML treats extra characters (NEL/LS/PS) as line breaks.
    state.checkLineBreaks = (minor < 2);

    if (minor !== 1 && minor !== 2) {
      throwWarning(state, 'unsupported YAML version of the document');
    }
  },

  TAG: function handleTagDirective(state, name, args) {

    var handle, prefix;

    if (args.length !== 2) {
      throwError(state, 'TAG directive accepts exactly two arguments');
    }

    handle = args[0];
    prefix = args[1];

    if (!PATTERN_TAG_HANDLE.test(handle)) {
      throwError(state, 'ill-formed tag handle (first argument) of the TAG directive');
    }

    // Each handle may be declared at most once per document.
    if (_hasOwnProperty.call(state.tagMap, handle)) {
      throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle');
    }

    if (!PATTERN_TAG_URI.test(prefix)) {
      throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive');
    }

    // Record the mapping so "!handle!suffix" tags can be expanded later.
    state.tagMap[handle] = prefix;
  }
};
| |
| |
// Appends input[start..end) to state.result after validating the characters:
// in JSON mode only tab and U+0020..U+10FFFF are allowed, otherwise the
// segment must contain no non-printable characters.
function captureSegment(state, start, end, checkJson) {
  if (start >= end) return;

  var segment = state.input.slice(start, end);

  if (checkJson) {
    for (var pos = 0, len = segment.length; pos < len; pos += 1) {
      var code = segment.charCodeAt(pos);
      if (!(code === 0x09 || (0x20 <= code && code <= 0x10FFFF))) {
        throwError(state, 'expected valid JSON character');
      }
    }
  } else if (PATTERN_NON_PRINTABLE.test(segment)) {
    throwError(state, 'the stream contains non-printable characters');
  }

  state.result += segment;
}
| |
// Implements the YAML merge key ("<<"): copies keys from `source` into
// `destination` without overwriting existing entries. Merged keys are marked
// overridable so a later explicit entry may replace them without being
// treated as a duplicate.
function mergeMappings(state, destination, source, overridableKeys) {
  if (!common.isObject(source)) {
    throwError(state, 'cannot merge mappings; the provided source object is unacceptable');
  }

  Object.keys(source).forEach(function (key) {
    if (!_hasOwnProperty.call(destination, key)) {
      destination[key] = source[key];
      overridableKeys[key] = true;
    }
  });
}
| |
// Stores one key/value pair into the mapping `_result` (creating it when
// null), handling the YAML merge key and guarding against both duplicate
// keys and toString-based code execution. Returns the mapping.
function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startPos) {
  var index, quantity;

  // The output is a plain object here, so keys can only be strings.
  // We need to convert keyNode to a string, but doing so can hang the process
  // (deeply nested arrays that explode exponentially using aliases).
  if (Array.isArray(keyNode)) {
    keyNode = Array.prototype.slice.call(keyNode);

    for (index = 0, quantity = keyNode.length; index < quantity; index += 1) {
      if (Array.isArray(keyNode[index])) {
        throwError(state, 'nested arrays are not supported inside keys');
      }

      // Flatten plain-object elements to a fixed string before stringifying.
      if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') {
        keyNode[index] = '[object Object]';
      }
    }
  }

  // Avoid code execution in load() via toString property
  // (still use its own toString for arrays, timestamps,
  // and whatever user schema extensions happen to have @@toStringTag)
  if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') {
    keyNode = '[object Object]';
  }


  keyNode = String(keyNode);

  if (_result === null) {
    _result = {};
  }

  if (keyTag === 'tag:yaml.org,2002:merge') {
    // Merge key: fold in the value's properties (or each array element's).
    if (Array.isArray(valueNode)) {
      for (index = 0, quantity = valueNode.length; index < quantity; index += 1) {
        mergeMappings(state, _result, valueNode[index], overridableKeys);
      }
    } else {
      mergeMappings(state, _result, valueNode, overridableKeys);
    }
  } else {
    // Regular key: duplicates are an error unless the existing entry came
    // from a merge (overridableKeys) or json mode allows overrides.
    if (!state.json &&
        !_hasOwnProperty.call(overridableKeys, keyNode) &&
        _hasOwnProperty.call(_result, keyNode)) {
      // Point the error at the key's location, not the current position.
      state.line = startLine || state.line;
      state.position = startPos || state.position;
      throwError(state, 'duplicated mapping key');
    }
    _result[keyNode] = valueNode;
    delete overridableKeys[keyNode];
  }

  return _result;
}
| |
// Consumes exactly one line break (LF, CR, or CRLF as a single break) at the
// current position, updating the line counter and line-start offset.
// Throws when the current character is not a line break.
function readLineBreak(state) {
  var ch = state.input.charCodeAt(state.position);
  var isCR = (ch === 0x0D/* CR */);

  if (ch !== 0x0A/* LF */ && !isCR) {
    throwError(state, 'a line break is expected');
  }

  state.position++;

  // Treat a CRLF pair as one line break.
  if (isCR && state.input.charCodeAt(state.position) === 0x0A/* LF */) {
    state.position++;
  }

  state.line += 1;
  state.lineStart = state.position;
}
| |
// Skips whitespace, optional comments and line breaks. Returns the number of
// line breaks consumed and leaves state.lineIndent set to the last line's
// indentation. Warns (non-fatally) when the indentation falls below
// checkIndent (pass -1 to disable the check).
function skipSeparationSpace(state, allowComments, checkIndent) {
  var lineBreaks = 0,
      ch = state.input.charCodeAt(state.position);

  while (ch !== 0) {
    while (is_WHITE_SPACE(ch)) {
      ch = state.input.charCodeAt(++state.position);
    }

    // Consume a comment up to (but not including) the line break.
    if (allowComments && ch === 0x23/* # */) {
      do {
        ch = state.input.charCodeAt(++state.position);
      } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0);
    }

    if (is_EOL(ch)) {
      readLineBreak(state);

      ch = state.input.charCodeAt(state.position);
      lineBreaks++;
      state.lineIndent = 0;

      // Count the new line's leading spaces as its indentation.
      while (ch === 0x20/* Space */) {
        state.lineIndent++;
        ch = state.input.charCodeAt(++state.position);
      }
    } else {
      // Reached a non-space, non-break character: separation is over.
      break;
    }
  }

  if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) {
    throwWarning(state, 'deficient indentation');
  }

  return lineBreaks;
}
| |
// Returns true when the current position starts a document separator:
// "---" (document start) or "..." (document end) followed by whitespace,
// a line break, or end of input. Does not advance the parse position.
function testDocumentSeparator(state) {
  var pos = state.position;
  var ch = state.input.charCodeAt(pos);

  // Condition state.position === state.lineStart is tested
  // in parent on each call, for efficiency. No needs to test here again.
  if ((ch !== 0x2D/* - */ && ch !== 0x2E/* . */) ||
      ch !== state.input.charCodeAt(pos + 1) ||
      ch !== state.input.charCodeAt(pos + 2)) {
    return false;
  }

  var following = state.input.charCodeAt(pos + 3);
  // Code 0 is the NUL sentinel appended to the input buffer.
  return following === 0 || is_WS_OR_EOL(following);
}
| |
// Applies YAML line folding to state.result: a single line break becomes a
// space; N breaks (N > 1) become N-1 newlines; zero breaks add nothing.
function writeFoldedLines(state, count) {
  if (count > 1) {
    state.result += common.repeat('\n', count - 1);
  } else if (count === 1) {
    state.result += ' ';
  }
}
| |
| |
// Reads an unquoted (plain) scalar. On success returns true with the text in
// state.result; otherwise restores state.kind/state.result and returns false.
function readPlainScalar(state, nodeIndent, withinFlowCollection) {
  var preceding,
      following,
      captureStart,
      captureEnd,
      hasPendingContent,
      _line,
      _lineStart,
      _lineIndent,
      _kind = state.kind,
      _result = state.result,
      ch;

  ch = state.input.charCodeAt(state.position);

  // A plain scalar may not begin with whitespace or any indicator character.
  if (is_WS_OR_EOL(ch) ||
      is_FLOW_INDICATOR(ch) ||
      ch === 0x23/* # */ ||
      ch === 0x26/* & */ ||
      ch === 0x2A/* * */ ||
      ch === 0x21/* ! */ ||
      ch === 0x7C/* | */ ||
      ch === 0x3E/* > */ ||
      ch === 0x27/* ' */ ||
      ch === 0x22/* " */ ||
      ch === 0x25/* % */ ||
      ch === 0x40/* @ */ ||
      ch === 0x60/* ` */) {
    return false;
  }

  // "?" and "-" may start a plain scalar only when NOT followed by
  // whitespace (or by a flow indicator inside a flow collection).
  if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) {
    following = state.input.charCodeAt(state.position + 1);

    if (is_WS_OR_EOL(following) ||
        withinFlowCollection && is_FLOW_INDICATOR(following)) {
      return false;
    }
  }

  state.kind = 'scalar';
  state.result = '';
  captureStart = captureEnd = state.position;
  hasPendingContent = false;

  while (ch !== 0) {
    if (ch === 0x3A/* : */) {
      // ": " ends the scalar (it introduces a mapping value).
      following = state.input.charCodeAt(state.position + 1);

      if (is_WS_OR_EOL(following) ||
          withinFlowCollection && is_FLOW_INDICATOR(following)) {
        break;
      }

    } else if (ch === 0x23/* # */) {
      // "#" starts a comment only when preceded by whitespace.
      preceding = state.input.charCodeAt(state.position - 1);

      if (is_WS_OR_EOL(preceding)) {
        break;
      }

    } else if ((state.position === state.lineStart && testDocumentSeparator(state)) ||
               withinFlowCollection && is_FLOW_INDICATOR(ch)) {
      break;

    } else if (is_EOL(ch)) {
      // Save position info: the scalar continues only if the next line is
      // indented at least nodeIndent.
      _line = state.line;
      _lineStart = state.lineStart;
      _lineIndent = state.lineIndent;
      skipSeparationSpace(state, false, -1);

      if (state.lineIndent >= nodeIndent) {
        hasPendingContent = true;
        ch = state.input.charCodeAt(state.position);
        continue;
      } else {
        // Not a continuation: rewind to the captured end and stop.
        state.position = captureEnd;
        state.line = _line;
        state.lineStart = _lineStart;
        state.lineIndent = _lineIndent;
        break;
      }
    }

    if (hasPendingContent) {
      // Flush the previous line's text and fold the intervening break(s).
      captureSegment(state, captureStart, captureEnd, false);
      writeFoldedLines(state, state.line - _line);
      captureStart = captureEnd = state.position;
      hasPendingContent = false;
    }

    // Advancing captureEnd only on non-space chars drops trailing whitespace.
    if (!is_WHITE_SPACE(ch)) {
      captureEnd = state.position + 1;
    }

    ch = state.input.charCodeAt(++state.position);
  }

  captureSegment(state, captureStart, captureEnd, false);

  if (state.result) {
    return true;
  }

  // Empty result: this was not a plain scalar after all; restore state.
  state.kind = _kind;
  state.result = _result;
  return false;
}
| |
// Reads a 'single quoted' scalar. Inside single quotes the only escape is ''
// (a literal quote); line breaks are folded. Returns true on success, false
// when the current character is not an opening quote.
function readSingleQuotedScalar(state, nodeIndent) {
  var ch,
      captureStart, captureEnd;

  ch = state.input.charCodeAt(state.position);

  if (ch !== 0x27/* ' */) {
    return false;
  }

  state.kind = 'scalar';
  state.result = '';
  state.position++;
  captureStart = captureEnd = state.position;

  while ((ch = state.input.charCodeAt(state.position)) !== 0) {
    if (ch === 0x27/* ' */) {
      captureSegment(state, captureStart, state.position, true);
      ch = state.input.charCodeAt(++state.position);

      if (ch === 0x27/* ' */) {
        // '' escape: capture the second quote as a literal character.
        captureStart = state.position;
        state.position++;
        captureEnd = state.position;
      } else {
        // Closing quote: scalar complete.
        return true;
      }

    } else if (is_EOL(ch)) {
      // Flush the current segment and fold the line break(s).
      captureSegment(state, captureStart, captureEnd, true);
      writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
      captureStart = captureEnd = state.position;

    } else if (state.position === state.lineStart && testDocumentSeparator(state)) {
      throwError(state, 'unexpected end of the document within a single quoted scalar');

    } else {
      state.position++;
      captureEnd = state.position;
    }
  }

  throwError(state, 'unexpected end of the stream within a single quoted scalar');
}
| |
// Reads a "double quoted" scalar, handling backslash escapes (the simple
// escape table, \xXX, \uXXXX, \UXXXXXXXX, and escaped line breaks) plus
// line folding. Returns true on success, false when the current character
// is not an opening double quote.
function readDoubleQuotedScalar(state, nodeIndent) {
  var captureStart,
      captureEnd,
      hexLength,
      hexResult,
      tmp,
      ch;

  ch = state.input.charCodeAt(state.position);

  if (ch !== 0x22/* " */) {
    return false;
  }

  state.kind = 'scalar';
  state.result = '';
  state.position++;
  captureStart = captureEnd = state.position;

  while ((ch = state.input.charCodeAt(state.position)) !== 0) {
    if (ch === 0x22/* " */) {
      // Closing quote: flush and finish.
      captureSegment(state, captureStart, state.position, true);
      state.position++;
      return true;

    } else if (ch === 0x5C/* \ */) {
      captureSegment(state, captureStart, state.position, true);
      ch = state.input.charCodeAt(++state.position);

      if (is_EOL(ch)) {
        // Escaped line break: join the lines without inserting a space.
        skipSeparationSpace(state, false, nodeIndent);

        // TODO: rework to inline fn with no type cast?
      } else if (ch < 256 && simpleEscapeCheck[ch]) {
        state.result += simpleEscapeMap[ch];
        state.position++;

      } else if ((tmp = escapedHexLen(ch)) > 0) {
        // \x, \u or \U escape: read 2, 4 or 8 hex digits.
        hexLength = tmp;
        hexResult = 0;

        for (; hexLength > 0; hexLength--) {
          ch = state.input.charCodeAt(++state.position);

          if ((tmp = fromHexCode(ch)) >= 0) {
            hexResult = (hexResult << 4) + tmp;

          } else {
            throwError(state, 'expected hexadecimal character');
          }
        }

        state.result += charFromCodepoint(hexResult);

        state.position++;

      } else {
        throwError(state, 'unknown escape sequence');
      }

      captureStart = captureEnd = state.position;

    } else if (is_EOL(ch)) {
      // Unescaped line break: flush and fold.
      captureSegment(state, captureStart, captureEnd, true);
      writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
      captureStart = captureEnd = state.position;

    } else if (state.position === state.lineStart && testDocumentSeparator(state)) {
      throwError(state, 'unexpected end of the document within a double quoted scalar');

    } else {
      state.position++;
      captureEnd = state.position;
    }
  }

  throwError(state, 'unexpected end of the stream within a double quoted scalar');
}
| |
// Reads a flow collection: a [a, b] sequence or {a: b} mapping, including
// "?" explicit keys and single-pair mapping shortcuts inside sequences.
// Returns true on success, false when no flow collection starts here.
function readFlowCollection(state, nodeIndent) {
  var readNext = true,
      _line,
      _tag = state.tag,
      _result,
      _anchor = state.anchor,
      following,
      terminator,
      isPair,
      isExplicitPair,
      isMapping,
      overridableKeys = {},
      keyNode,
      keyTag,
      valueNode,
      ch;

  ch = state.input.charCodeAt(state.position);

  if (ch === 0x5B/* [ */) {
    terminator = 0x5D;/* ] */
    isMapping = false;
    _result = [];
  } else if (ch === 0x7B/* { */) {
    terminator = 0x7D;/* } */
    isMapping = true;
    _result = {};
  } else {
    return false;
  }

  // Register the anchor before reading contents so aliases inside the
  // collection can refer back to it.
  if (state.anchor !== null) {
    state.anchorMap[state.anchor] = _result;
  }

  ch = state.input.charCodeAt(++state.position);

  while (ch !== 0) {
    skipSeparationSpace(state, true, nodeIndent);

    ch = state.input.charCodeAt(state.position);

    if (ch === terminator) {
      // Closing bracket/brace: restore tag/anchor and publish the result.
      state.position++;
      state.tag = _tag;
      state.anchor = _anchor;
      state.kind = isMapping ? 'mapping' : 'sequence';
      state.result = _result;
      return true;
    } else if (!readNext) {
      throwError(state, 'missed comma between flow collection entries');
    }

    keyTag = keyNode = valueNode = null;
    isPair = isExplicitPair = false;

    // "? " introduces an explicit key entry.
    if (ch === 0x3F/* ? */) {
      following = state.input.charCodeAt(state.position + 1);

      if (is_WS_OR_EOL(following)) {
        isPair = isExplicitPair = true;
        state.position++;
        skipSeparationSpace(state, true, nodeIndent);
      }
    }

    _line = state.line;
    composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
    keyTag = state.tag;
    keyNode = state.result;
    skipSeparationSpace(state, true, nodeIndent);

    ch = state.input.charCodeAt(state.position);

    // A ":" on the same line (or after an explicit "?") makes this a pair.
    if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) {
      isPair = true;
      ch = state.input.charCodeAt(++state.position);
      skipSeparationSpace(state, true, nodeIndent);
      composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
      valueNode = state.result;
    }

    if (isMapping) {
      storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode);
    } else if (isPair) {
      // Single-pair mapping inside a sequence: [a: b] becomes [{a: b}].
      _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode));
    } else {
      _result.push(keyNode);
    }

    skipSeparationSpace(state, true, nodeIndent);

    ch = state.input.charCodeAt(state.position);

    if (ch === 0x2C/* , */) {
      readNext = true;
      ch = state.input.charCodeAt(++state.position);
    } else {
      readNext = false;
    }
  }

  throwError(state, 'unexpected end of the stream within a flow collection');
}
| |
/**
 * Reads a block scalar (literal "|" or folded ">" style) starting at the
 * current position. Parses the optional header indicators -- chomping
 * ("+" keep / "-" strip) and an explicit indentation width -- then consumes
 * the scalar body into `state.result`.
 *
 * Returns true if a block scalar was read (with state.kind = 'scalar'),
 * false if the current character does not begin one. Malformed headers are
 * reported via throwError.
 */
function readBlockScalar(state, nodeIndent) {
  var captureStart,
      folding,
      chomping = CHOMPING_CLIP,
      didReadContent = false,
      detectedIndent = false,
      textIndent = nodeIndent,
      emptyLines = 0,
      atMoreIndented = false,
      tmp,
      ch;

  ch = state.input.charCodeAt(state.position);

  if (ch === 0x7C/* | */) {
    folding = false;
  } else if (ch === 0x3E/* > */) {
    folding = true;
  } else {
    return false;
  }

  state.kind = 'scalar';
  state.result = '';

  // Parse the header: optional chomping indicator and/or explicit
  // indentation width, in either order, each at most once.
  while (ch !== 0) {
    ch = state.input.charCodeAt(++state.position);

    if (ch === 0x2B/* + */ || ch === 0x2D/* - */) {
      if (CHOMPING_CLIP === chomping) {
        chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP;
      } else {
        throwError(state, 'repeat of a chomping mode identifier');
      }

    } else if ((tmp = fromDecimalCode(ch)) >= 0) {
      if (tmp === 0) {
        throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one');
      } else if (!detectedIndent) {
        // Explicit width is relative to the parent node's indentation.
        textIndent = nodeIndent + tmp - 1;
        detectedIndent = true;
      } else {
        throwError(state, 'repeat of an indentation width identifier');
      }

    } else {
      break;
    }
  }

  // Skip trailing whitespace and an optional comment on the header line.
  if (is_WHITE_SPACE(ch)) {
    do { ch = state.input.charCodeAt(++state.position); }
    while (is_WHITE_SPACE(ch));

    if (ch === 0x23/* # */) {
      do { ch = state.input.charCodeAt(++state.position); }
      while (!is_EOL(ch) && (ch !== 0));
    }
  }

  // Consume the scalar body, one line per iteration.
  while (ch !== 0) {
    readLineBreak(state);
    state.lineIndent = 0;

    ch = state.input.charCodeAt(state.position);

    while ((!detectedIndent || state.lineIndent < textIndent) &&
           (ch === 0x20/* Space */)) {
      state.lineIndent++;
      ch = state.input.charCodeAt(++state.position);
    }

    // Auto-detect indentation from the first non-empty content line when
    // no explicit width was given in the header.
    if (!detectedIndent && state.lineIndent > textIndent) {
      textIndent = state.lineIndent;
    }

    if (is_EOL(ch)) {
      emptyLines++;
      continue;
    }

    // End of the scalar.
    if (state.lineIndent < textIndent) {

      // Perform the chomping.
      if (chomping === CHOMPING_KEEP) {
        state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
      } else if (chomping === CHOMPING_CLIP) {
        if (didReadContent) { // i.e. only if the scalar is not empty.
          state.result += '\n';
        }
      }

      // Break this `while` cycle and go to the function's epilogue.
      break;
    }

    // Folded style: use fancy rules to handle line breaks.
    if (folding) {

      // Lines starting with white space characters (more-indented lines) are not folded.
      if (is_WHITE_SPACE(ch)) {
        atMoreIndented = true;
        // except for the first content line (cf. Example 8.1)
        state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);

      // End of more-indented block.
      } else if (atMoreIndented) {
        atMoreIndented = false;
        state.result += common.repeat('\n', emptyLines + 1);

      // Just one line break - perceive as the same line.
      } else if (emptyLines === 0) {
        if (didReadContent) { // i.e. only if we have already read some scalar content.
          state.result += ' ';
        }

      // Several line breaks - perceive as different lines.
      } else {
        state.result += common.repeat('\n', emptyLines);
      }

    // Literal style: just add exact number of line breaks between content lines.
    } else {
      // Keep all line breaks except the header line break.
      state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines);
    }

    didReadContent = true;
    detectedIndent = true;
    emptyLines = 0;
    captureStart = state.position;

    // Capture the rest of the line verbatim.
    while (!is_EOL(ch) && (ch !== 0)) {
      ch = state.input.charCodeAt(++state.position);
    }

    captureSegment(state, captureStart, state.position, false);
  }

  return true;
}
| |
/**
 * Reads a block sequence (lines of "- item") at the current position.
 * Entries are collected into an array; a "-" followed directly by a line
 * break (without a more-indented node) yields a null entry.
 *
 * Returns true (with state.kind = 'sequence' and state.result set to the
 * array) if at least one entry was detected, false otherwise.
 */
function readBlockSequence(state, nodeIndent) {
  var _line,
      _tag = state.tag,
      _anchor = state.anchor,
      _result = [],
      following,
      detected = false,
      ch;

  // Register the (still empty) array up front so aliases inside the
  // sequence can refer back to it.
  if (state.anchor !== null) {
    state.anchorMap[state.anchor] = _result;
  }

  ch = state.input.charCodeAt(state.position);

  while (ch !== 0) {

    if (ch !== 0x2D/* - */) {
      break;
    }

    following = state.input.charCodeAt(state.position + 1);

    // "-" only introduces an entry when followed by whitespace or EOL.
    if (!is_WS_OR_EOL(following)) {
      break;
    }

    detected = true;
    state.position++;

    if (skipSeparationSpace(state, true, -1)) {
      if (state.lineIndent <= nodeIndent) {
        // "-" with no suitably indented node after it: a null entry.
        _result.push(null);
        ch = state.input.charCodeAt(state.position);
        continue;
      }
    }

    _line = state.line;
    composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true);
    _result.push(state.result);
    skipSeparationSpace(state, true, -1);

    ch = state.input.charCodeAt(state.position);

    if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {
      throwError(state, 'bad indentation of a sequence entry');
    } else if (state.lineIndent < nodeIndent) {
      break;
    }
  }

  if (detected) {
    // Restore the tag/anchor that belong to the sequence itself (composeNode
    // overwrote them while reading the entries).
    state.tag = _tag;
    state.anchor = _anchor;
    state.kind = 'sequence';
    state.result = _result;
    return true;
  }
  return false;
}
| |
/**
 * Reads a block mapping at the current position. Handles both explicit
 * notation ("? key" / ": value" blocks) and implicit notation
 * ("key: value" on one line), accumulating pairs into a plain object via
 * storeMappingPair.
 *
 * Returns true (with state.kind = 'mapping' and state.result set) if at
 * least one pair was detected; returns false -- or true while keeping a
 * lone composed node as the result -- when no mapping syntax follows.
 */
function readBlockMapping(state, nodeIndent, flowIndent) {
  var following,
      allowCompact,
      _line,
      _pos,
      _tag = state.tag,
      _anchor = state.anchor,
      _result = {},
      overridableKeys = {},
      keyTag = null,
      keyNode = null,
      valueNode = null,
      atExplicitKey = false,
      detected = false,
      ch;

  // Register the (still empty) object up front so aliases inside the
  // mapping can refer back to it.
  if (state.anchor !== null) {
    state.anchorMap[state.anchor] = _result;
  }

  ch = state.input.charCodeAt(state.position);

  while (ch !== 0) {
    following = state.input.charCodeAt(state.position + 1);
    _line = state.line; // Save the current line.
    _pos = state.position;

    //
    // Explicit notation case. There are two separate blocks:
    // first for the key (denoted by "?") and second for the value (denoted by ":")
    //
    if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) {

      if (ch === 0x3F/* ? */) {
        if (atExplicitKey) {
          // A new "?" closes the previous key, which therefore has no value.
          storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null);
          keyTag = keyNode = valueNode = null;
        }

        detected = true;
        atExplicitKey = true;
        allowCompact = true;

      } else if (atExplicitKey) {
        // i.e. 0x3A/* : */ === character after the explicit key.
        atExplicitKey = false;
        allowCompact = true;

      } else {
        throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line');
      }

      state.position += 1;
      ch = following;

    //
    // Implicit notation case. Flow-style node as the key first, then ":", and the value.
    //
    } else if (composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) {

      if (state.line === _line) {
        ch = state.input.charCodeAt(state.position);

        while (is_WHITE_SPACE(ch)) {
          ch = state.input.charCodeAt(++state.position);
        }

        if (ch === 0x3A/* : */) {
          ch = state.input.charCodeAt(++state.position);

          if (!is_WS_OR_EOL(ch)) {
            throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping');
          }

          if (atExplicitKey) {
            storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null);
            keyTag = keyNode = valueNode = null;
          }

          detected = true;
          atExplicitKey = false;
          allowCompact = false;
          keyTag = state.tag;
          keyNode = state.result;

        } else if (detected) {
          throwError(state, 'can not read an implicit mapping pair; a colon is missed');

        } else {
          // Not a mapping after all; hand the composed node back unchanged.
          state.tag = _tag;
          state.anchor = _anchor;
          return true; // Keep the result of `composeNode`.
        }

      } else if (detected) {
        throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key');

      } else {
        // Not a mapping after all; hand the composed node back unchanged.
        state.tag = _tag;
        state.anchor = _anchor;
        return true; // Keep the result of `composeNode`.
      }

    } else {
      break; // Reading is done. Go to the epilogue.
    }

    //
    // Common reading code for both explicit and implicit notations.
    //
    if (state.line === _line || state.lineIndent > nodeIndent) {
      if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) {
        if (atExplicitKey) {
          keyNode = state.result;
        } else {
          valueNode = state.result;
        }
      }

      if (!atExplicitKey) {
        storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _pos);
        keyTag = keyNode = valueNode = null;
      }

      skipSeparationSpace(state, true, -1);
      ch = state.input.charCodeAt(state.position);
    }

    if (state.lineIndent > nodeIndent && (ch !== 0)) {
      throwError(state, 'bad indentation of a mapping entry');
    } else if (state.lineIndent < nodeIndent) {
      break;
    }
  }

  //
  // Epilogue.
  //

  // Special case: last mapping's node contains only the key in explicit notation.
  if (atExplicitKey) {
    storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null);
  }

  // Expose the resulting mapping.
  if (detected) {
    state.tag = _tag;
    state.anchor = _anchor;
    state.kind = 'mapping';
    state.result = _result;
  }

  return detected;
}
| |
/**
 * Reads a tag property at the current position and resolves it into
 * `state.tag`. Supported forms: verbatim tags "!<...>", named handles
 * "!handle!suffix", the secondary handle "!!suffix" (resolved to the
 * "tag:yaml.org,2002:" namespace), and the primary handle "!suffix".
 *
 * Returns true when a tag was read, false if the current character is not
 * "!". Throws on malformed tags or undeclared handles.
 */
function readTagProperty(state) {
  var _position,
      isVerbatim = false,
      isNamed = false,
      tagHandle,
      tagName,
      ch;

  ch = state.input.charCodeAt(state.position);

  if (ch !== 0x21/* ! */) return false;

  if (state.tag !== null) {
    throwError(state, 'duplication of a tag property');
  }

  ch = state.input.charCodeAt(++state.position);

  if (ch === 0x3C/* < */) {
    isVerbatim = true;
    ch = state.input.charCodeAt(++state.position);

  } else if (ch === 0x21/* ! */) {
    isNamed = true;
    tagHandle = '!!';
    ch = state.input.charCodeAt(++state.position);

  } else {
    tagHandle = '!';
  }

  _position = state.position;

  if (isVerbatim) {
    // Verbatim tag: scan to the closing ">".
    do { ch = state.input.charCodeAt(++state.position); }
    while (ch !== 0 && ch !== 0x3E/* > */);

    if (state.position < state.length) {
      tagName = state.input.slice(_position, state.position);
      ch = state.input.charCodeAt(++state.position);
    } else {
      throwError(state, 'unexpected end of the stream within a verbatim tag');
    }
  } else {
    // Shorthand tag: scan the suffix; a second "!" turns what was read so
    // far into a named handle.
    while (ch !== 0 && !is_WS_OR_EOL(ch)) {

      if (ch === 0x21/* ! */) {
        if (!isNamed) {
          tagHandle = state.input.slice(_position - 1, state.position + 1);

          if (!PATTERN_TAG_HANDLE.test(tagHandle)) {
            throwError(state, 'named tag handle cannot contain such characters');
          }

          isNamed = true;
          _position = state.position + 1;
        } else {
          throwError(state, 'tag suffix cannot contain exclamation marks');
        }
      }

      ch = state.input.charCodeAt(++state.position);
    }

    tagName = state.input.slice(_position, state.position);

    if (PATTERN_FLOW_INDICATORS.test(tagName)) {
      throwError(state, 'tag suffix cannot contain flow indicator characters');
    }
  }

  if (tagName && !PATTERN_TAG_URI.test(tagName)) {
    throwError(state, 'tag name cannot contain such characters: ' + tagName);
  }

  // Resolve the handle + suffix into the full tag URI.
  if (isVerbatim) {
    state.tag = tagName;

  } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) {
    state.tag = state.tagMap[tagHandle] + tagName;

  } else if (tagHandle === '!') {
    state.tag = '!' + tagName;

  } else if (tagHandle === '!!') {
    state.tag = 'tag:yaml.org,2002:' + tagName;

  } else {
    throwError(state, 'undeclared tag handle "' + tagHandle + '"');
  }

  return true;
}
| |
/**
 * Reads an anchor property ("&name") at the current position.
 * On success the anchor name is stored in `state.anchor` and true is
 * returned; false is returned if the current character is not "&".
 * Throws when an anchor is duplicated or its name is empty.
 */
function readAnchorProperty(state) {
  var ch = state.input.charCodeAt(state.position);

  if (ch !== 0x26/* & */) return false;

  if (state.anchor !== null) {
    throwError(state, 'duplication of an anchor property');
  }

  // Skip the "&" and remember where the anchor name starts.
  ch = state.input.charCodeAt(++state.position);
  var nameStart = state.position;

  // The name extends until whitespace, EOL, a flow indicator, or the end
  // of input (charCode 0 is the sentinel terminator).
  while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
    ch = state.input.charCodeAt(++state.position);
  }

  if (state.position === nameStart) {
    throwError(state, 'name of an anchor node must contain at least one character');
  }

  state.anchor = state.input.slice(nameStart, state.position);
  return true;
}
| |
/**
 * Reads an alias node ("*name") at the current position and resolves it
 * against the anchors collected so far in `state.anchorMap`.
 *
 * On success stores the anchored value in `state.result` and returns true;
 * returns false if the current character is not "*". Throws on an empty
 * alias name or an alias referencing an unknown anchor.
 */
function readAlias(state) {
  var _position, alias,
      ch;

  ch = state.input.charCodeAt(state.position);

  if (ch !== 0x2A/* * */) return false;

  ch = state.input.charCodeAt(++state.position);
  _position = state.position;

  while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
    ch = state.input.charCodeAt(++state.position);
  }

  if (state.position === _position) {
    throwError(state, 'name of an alias node must contain at least one character');
  }

  alias = state.input.slice(_position, state.position);

  // Use the cached Object.prototype.hasOwnProperty (as the rest of this
  // file does, e.g. for tagMap lookups) instead of calling `.hasOwnProperty`
  // directly on the map: an anchor literally named "hasOwnProperty" would
  // shadow the method and make the direct call throw.
  if (!_hasOwnProperty.call(state.anchorMap, alias)) {
    throwError(state, 'unidentified alias "' + alias + '"');
  }

  state.result = state.anchorMap[alias];
  skipSeparationSpace(state, true, -1);
  return true;
}
| |
/**
 * The central recursive-descent dispatcher: composes a single node at the
 * current position. Reads optional tag/anchor properties, then tries, in
 * order appropriate to `nodeContext` and indentation, block collections,
 * flow collections, block scalars, quoted scalars, aliases, and plain
 * scalars. Finally resolves implicit ("?") or explicit tags against the
 * schema's type maps.
 *
 * nodeContext is one of CONTEXT_BLOCK_OUT / CONTEXT_BLOCK_IN /
 * CONTEXT_FLOW_OUT / CONTEXT_FLOW_IN. allowToSeek permits skipping
 * separation space before the node; allowCompact permits a block
 * collection to start on the same line as its parent ("- - a").
 *
 * Returns true when a node (or at least a tag/anchor property) was read;
 * the composed value is left in `state.result`.
 */
function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) {
  var allowBlockStyles,
      allowBlockScalars,
      allowBlockCollections,
      indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this<parent
      atNewLine = false,
      hasContent = false,
      typeIndex,
      typeQuantity,
      type,
      flowIndent,
      blockIndent;

  if (state.listener !== null) {
    state.listener('open', state);
  }

  state.tag = null;
  state.anchor = null;
  state.kind = null;
  state.result = null;

  // Block styles are only legal in block contexts.
  allowBlockStyles = allowBlockScalars = allowBlockCollections =
    CONTEXT_BLOCK_OUT === nodeContext ||
    CONTEXT_BLOCK_IN  === nodeContext;

  if (allowToSeek) {
    if (skipSeparationSpace(state, true, -1)) {
      atNewLine = true;

      if (state.lineIndent > parentIndent) {
        indentStatus = 1;
      } else if (state.lineIndent === parentIndent) {
        indentStatus = 0;
      } else if (state.lineIndent < parentIndent) {
        indentStatus = -1;
      }
    }
  }

  // Read any tag and/or anchor properties; re-evaluate indentation after
  // each, since properties may sit on their own line above the node.
  if (indentStatus === 1) {
    while (readTagProperty(state) || readAnchorProperty(state)) {
      if (skipSeparationSpace(state, true, -1)) {
        atNewLine = true;
        allowBlockCollections = allowBlockStyles;

        if (state.lineIndent > parentIndent) {
          indentStatus = 1;
        } else if (state.lineIndent === parentIndent) {
          indentStatus = 0;
        } else if (state.lineIndent < parentIndent) {
          indentStatus = -1;
        }
      } else {
        allowBlockCollections = false;
      }
    }
  }

  if (allowBlockCollections) {
    allowBlockCollections = atNewLine || allowCompact;
  }

  if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) {
    if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) {
      flowIndent = parentIndent;
    } else {
      flowIndent = parentIndent + 1;
    }

    blockIndent = state.position - state.lineStart;

    if (indentStatus === 1) {
      // Try each node kind in priority order; the first reader to return
      // true has consumed the node.
      if (allowBlockCollections &&
          (readBlockSequence(state, blockIndent) ||
           readBlockMapping(state, blockIndent, flowIndent)) ||
          readFlowCollection(state, flowIndent)) {
        hasContent = true;
      } else {
        if ((allowBlockScalars && readBlockScalar(state, flowIndent)) ||
            readSingleQuotedScalar(state, flowIndent) ||
            readDoubleQuotedScalar(state, flowIndent)) {
          hasContent = true;

        } else if (readAlias(state)) {
          hasContent = true;

          if (state.tag !== null || state.anchor !== null) {
            throwError(state, 'alias node should not have any properties');
          }

        } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) {
          hasContent = true;

          if (state.tag === null) {
            state.tag = '?';
          }
        }

        if (state.anchor !== null) {
          state.anchorMap[state.anchor] = state.result;
        }
      }
    } else if (indentStatus === 0) {
      // Special case: block sequences are allowed to have same indentation level as the parent.
      // http://www.yaml.org/spec/1.2/spec.html#id2799784
      hasContent = allowBlockCollections && readBlockSequence(state, blockIndent);
    }
  }

  // Tag resolution phase.
  if (state.tag !== null && state.tag !== '!') {
    if (state.tag === '?') {
      // Implicit resolving is not allowed for non-scalar types, and '?'
      // non-specific tag is only automatically assigned to plain scalars.
      //
      // We only need to check kind conformity in case user explicitly assigns '?'
      // tag, for example like this: "!<?> [0]"
      //
      if (state.result !== null && state.kind !== 'scalar') {
        throwError(state, 'unacceptable node kind for !<?> tag; it should be "scalar", not "' + state.kind + '"');
      }

      for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) {
        type = state.implicitTypes[typeIndex];

        if (type.resolve(state.result)) { // `state.result` updated in resolver if matched
          state.result = type.construct(state.result);
          state.tag = type.tag;
          if (state.anchor !== null) {
            state.anchorMap[state.anchor] = state.result;
          }
          break;
        }
      }
    } else if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) {
      type = state.typeMap[state.kind || 'fallback'][state.tag];

      if (state.result !== null && type.kind !== state.kind) {
        throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"');
      }

      if (!type.resolve(state.result)) { // `state.result` updated in resolver if matched
        throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag');
      } else {
        state.result = type.construct(state.result);
        if (state.anchor !== null) {
          state.anchorMap[state.anchor] = state.result;
        }
      }
    } else {
      throwError(state, 'unknown tag !<' + state.tag + '>');
    }
  }

  if (state.listener !== null) {
    state.listener('close', state);
  }
  return state.tag !== null || state.anchor !== null || hasContent;
}
| |
/**
 * Reads one YAML document from the stream: leading "%" directives, an
 * optional "---" directives-end marker, the document content, and an
 * optional "..." document-end marker. The composed document is pushed
 * onto `state.documents`.
 */
function readDocument(state) {
  var documentStart = state.position,
      _position,
      directiveName,
      directiveArgs,
      hasDirectives = false,
      ch;

  // Per-document state: directives and anchors do not leak across documents.
  state.version = null;
  state.checkLineBreaks = state.legacy;
  state.tagMap = {};
  state.anchorMap = {};

  // Parse "%DIRECTIVE arg1 arg2 ..." lines.
  while ((ch = state.input.charCodeAt(state.position)) !== 0) {
    skipSeparationSpace(state, true, -1);

    ch = state.input.charCodeAt(state.position);

    if (state.lineIndent > 0 || ch !== 0x25/* % */) {
      break;
    }

    hasDirectives = true;
    ch = state.input.charCodeAt(++state.position);
    _position = state.position;

    while (ch !== 0 && !is_WS_OR_EOL(ch)) {
      ch = state.input.charCodeAt(++state.position);
    }

    directiveName = state.input.slice(_position, state.position);
    directiveArgs = [];

    if (directiveName.length < 1) {
      throwError(state, 'directive name must not be less than one character in length');
    }

    // Collect whitespace-separated arguments up to EOL or a comment.
    while (ch !== 0) {
      while (is_WHITE_SPACE(ch)) {
        ch = state.input.charCodeAt(++state.position);
      }

      if (ch === 0x23/* # */) {
        do { ch = state.input.charCodeAt(++state.position); }
        while (ch !== 0 && !is_EOL(ch));
        break;
      }

      if (is_EOL(ch)) break;

      _position = state.position;

      while (ch !== 0 && !is_WS_OR_EOL(ch)) {
        ch = state.input.charCodeAt(++state.position);
      }

      directiveArgs.push(state.input.slice(_position, state.position));
    }

    if (ch !== 0) readLineBreak(state);

    if (_hasOwnProperty.call(directiveHandlers, directiveName)) {
      directiveHandlers[directiveName](state, directiveName, directiveArgs);
    } else {
      throwWarning(state, 'unknown document directive "' + directiveName + '"');
    }
  }

  skipSeparationSpace(state, true, -1);

  // Directives (if any) must be terminated by a "---" marker.
  if (state.lineIndent === 0 &&
      state.input.charCodeAt(state.position)     === 0x2D/* - */ &&
      state.input.charCodeAt(state.position + 1) === 0x2D/* - */ &&
      state.input.charCodeAt(state.position + 2) === 0x2D/* - */) {
    state.position += 3;
    skipSeparationSpace(state, true, -1);

  } else if (hasDirectives) {
    throwError(state, 'directives end mark is expected');
  }

  composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true);
  skipSeparationSpace(state, true, -1);

  if (state.checkLineBreaks &&
      PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) {
    throwWarning(state, 'non-ASCII line breaks are interpreted as content');
  }

  state.documents.push(state.result);

  if (state.position === state.lineStart && testDocumentSeparator(state)) {

    // Consume an explicit "..." document-end marker.
    if (state.input.charCodeAt(state.position) === 0x2E/* . */) {
      state.position += 3;
      skipSeparationSpace(state, true, -1);
    }
    return;
  }

  if (state.position < (state.length - 1)) {
    throwError(state, 'end of the stream or a document separator is expected');
  } else {
    return;
  }
}
| |
| |
/**
 * Normalizes `input` and parses every YAML document it contains.
 * Returns the array of parsed documents (possibly empty).
 * Rejects input containing a null byte.
 */
function loadDocuments(input, options) {
  input = String(input);
  options = options || {};

  if (input.length !== 0) {

    // Add tailing `\n` if not exists
    if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ &&
        input.charCodeAt(input.length - 1) !== 0x0D/* CR */) {
      input += '\n';
    }

    // Strip BOM
    if (input.charCodeAt(0) === 0xFEFF) {
      input = input.slice(1);
    }
  }

  var state = new State(input, options);

  var nullpos = input.indexOf('\0');

  if (nullpos !== -1) {
    // Point the error at the offending byte for a useful position report.
    state.position = nullpos;
    throwError(state, 'null byte is not allowed in input');
  }

  // Use 0 as string terminator. That significantly simplifies bounds check.
  state.input += '\0';

  // Skip leading indentation of the first line.
  while (state.input.charCodeAt(state.position) === 0x20/* Space */) {
    state.lineIndent += 1;
    state.position += 1;
  }

  // Each iteration consumes exactly one document (plus its separators).
  while (state.position < (state.length - 1)) {
    readDocument(state);
  }

  return state.documents;
}
| |
| |
/**
 * Parses every YAML document in `input`. When `iterator` is a function it
 * is invoked once per parsed document; otherwise the array of documents is
 * returned. Also supports the legacy call shape loadAll(input, options),
 * where the second argument is an options object rather than an iterator.
 */
function loadAll(input, iterator, options) {
  // Legacy shape: loadAll(input, options)
  if (typeof iterator === 'object' && iterator !== null && typeof options === 'undefined') {
    options = iterator;
    iterator = null;
  }

  var documents = loadDocuments(input, options);

  if (typeof iterator !== 'function') {
    return documents;
  }

  var count = documents.length;
  for (var i = 0; i < count; i += 1) {
    iterator(documents[i]);
  }
}
| |
| |
/**
 * Parses `input` expecting at most one YAML document.
 * Returns undefined for an empty stream, the parsed document for a
 * single-document stream, and throws YAMLException when the stream
 * contains more than one document.
 */
function load(input, options) {
  var documents = loadDocuments(input, options);

  switch (documents.length) {
    case 0:
      /*eslint-disable no-undefined*/
      return undefined;
    case 1:
      return documents[0];
    default:
      throw new YAMLException('expected a single document in the stream, but found more');
  }
}
| |
| |
/**
 * Same as loadAll, but parses with DEFAULT_SAFE_SCHEMA merged into the
 * caller-supplied options. Also supports the legacy call shape
 * safeLoadAll(input, options).
 */
function safeLoadAll(input, iterator, options) {
  // Legacy shape: safeLoadAll(input, options)
  if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') {
    options = iterator;
    iterator = null;
  }

  var safeOptions = common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options);
  return loadAll(input, iterator, safeOptions);
}
| |
| |
/**
 * Same as load, but parses with DEFAULT_SAFE_SCHEMA merged into the
 * caller-supplied options.
 */
function safeLoad(input, options) {
  return load(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options));
}
| |
| |
// Public entry points of the YAML loader module.
module.exports.loadAll = loadAll;
module.exports.load = load;
module.exports.safeLoadAll = safeLoadAll;
module.exports.safeLoad = safeLoad;
| |
| |
| /***/ }), |
| |
| /***/ 462: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;

/**
 * Escapes a command name for cmd.exe by prefixing every metacharacter
 * with the "^" escape character.
 */
function escapeCommand(arg) {
  return arg.replace(metaCharsRegExp, '^$1');
}

/**
 * Escapes an arbitrary argument for cmd.exe.
 * Algorithm based on https://qntm.org/cmd
 *
 * @param arg value to escape (coerced to string)
 * @param doubleEscapeMetaChars escape the metacharacters a second time,
 *        for commands that pass through cmd.exe twice
 */
function escapeArgument(arg, doubleEscapeMetaChars) {
  // Convert to string
  let escaped = `${arg}`;

  // Sequence of backslashes followed by a double quote:
  // double up all the backslashes and escape the double quote
  escaped = escaped.replace(/(\\*)"/g, '$1$1\\"');

  // Sequence of backslashes followed by the end of the string
  // (which will become a double quote later):
  // double up all the backslashes
  escaped = escaped.replace(/(\\*)$/, '$1$1');

  // All other backslashes occur literally.
  // Quote the whole thing, then escape meta chars.
  escaped = `"${escaped}"`.replace(metaCharsRegExp, '^$1');

  // Double escape meta chars if necessary
  if (doubleEscapeMetaChars) {
    escaped = escaped.replace(metaCharsRegExp, '^$1');
  }

  return escaped;
}
| |
// cmd.exe escaping helpers: `command` for executable names, `argument` for
// individual arguments.
module.exports.command = escapeCommand;
module.exports.argument = escapeArgument;
| |
| |
| /***/ }), |
| |
| /***/ 463: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Returns the `default` export when `ex` looks like a transpiled ES module
// namespace object (an object with a `default` property); otherwise
// returns `ex` itself.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
| |
var deprecation = __webpack_require__(692);
var once = _interopDefault(__webpack_require__(969));

// Emits a given deprecation warning at most once per process.
const logOnce = once(deprecation => console.warn(deprecation));
| /** |
| * Error with extra properties to help with debugging |
| */ |
| |
class RequestError extends Error {
  /**
   * @param {string} message    error message
   * @param {number} statusCode HTTP status code of the failed request
   * @param {object} options    request context; `headers` is optional
   */
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */

    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;
    // Deprecated alias: reading `error.code` warns once, then returns the status.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }

    });
    this.headers = options.headers || {}; // redact request credentials without mutating original request options

    // NOTE(review): assumes `options.request` and `options.request.headers`
    // are always provided -- confirm against callers.
    const requestCopy = Object.assign({}, options.request);

    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }

    requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
  }

}
| |
| exports.RequestError = RequestError; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 469: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript helper: adapts a CommonJS export for `import * as ns` use.
// A real ES module (flagged by __esModule) passes through untouched;
// anything else gets its own enumerable properties copied onto a fresh
// namespace object whose `default` property is the original module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) {
        Object.keys(mod).forEach(function (key) {
            result[key] = mod[key];
        });
    }
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
const graphql_1 = __webpack_require__(898);
const rest_1 = __webpack_require__(0);
const Context = __importStar(__webpack_require__(262));
const httpClient = __importStar(__webpack_require__(539));
// We need this in order to extend Octokit
// NOTE(review): replaces Octokit.prototype with a constructed instance so the
// GitHub subclass inherits instance members -- confirm against @octokit/rest semantics.
rest_1.Octokit.prototype = new rest_1.Octokit();
// Workflow context (repo, event payload, etc.) shared by consumers of this module.
exports.context = new Context.Context();
class GitHub extends rest_1.Octokit {
    /**
     * @param token auth token, or an options object (see disambiguate)
     * @param opts  Octokit options when `token` is a string
     */
    constructor(token, opts) {
        super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)));
        this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts));
    }
    /**
     * Disambiguates the constructor overload parameters
     */
    static disambiguate(token, opts) {
        return [
            typeof token === 'string' ? token : '',
            typeof token === 'object' ? token : opts || {}
        ];
    }
    // Builds the options passed to the Octokit (REST) constructor:
    // base URL, auth string, and (optionally) a proxy agent.
    static getOctokitOptions(args) {
        const token = args[0];
        const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller
        // Base URL - GHES or Dotcom
        options.baseUrl = options.baseUrl || this.getApiBaseUrl();
        // Auth
        const auth = GitHub.getAuthString(token, options);
        if (auth) {
            options.auth = auth;
        }
        // Proxy
        const agent = GitHub.getProxyAgent(options.baseUrl, options);
        if (agent) {
            // Shallow clone - don't mutate the object provided by the caller
            options.request = options.request ? Object.assign({}, options.request) : {};
            // Set the agent
            options.request.agent = agent;
        }
        return options;
    }
    // Builds a graphql function preconfigured with base URL, authorization
    // header, and (optionally) a proxy agent.
    static getGraphQL(args) {
        const defaults = {};
        defaults.baseUrl = this.getGraphQLBaseUrl();
        const token = args[0];
        const options = args[1];
        // Authorization
        const auth = this.getAuthString(token, options);
        if (auth) {
            defaults.headers = {
                authorization: auth
            };
        }
        // Proxy
        const agent = GitHub.getProxyAgent(defaults.baseUrl, options);
        if (agent) {
            defaults.request = { agent };
        }
        return graphql_1.graphql.defaults(defaults);
    }
    // Returns `options.auth` verbatim when it is a string, otherwise
    // "token <token>". Exactly one of token / options.auth must be supplied.
    static getAuthString(token, options) {
        // Validate args
        if (!token && !options.auth) {
            throw new Error('Parameter token or opts.auth is required');
        }
        else if (token && options.auth) {
            throw new Error('Parameters token and opts.auth may not both be specified');
        }
        return typeof options.auth === 'string' ? options.auth : `token ${token}`;
    }
    // Returns a proxy agent for `destinationUrl`, unless the caller already
    // supplied one in options.request.agent or no proxy is configured.
    static getProxyAgent(destinationUrl, options) {
        var _a;
        if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) {
            if (httpClient.getProxyUrl(destinationUrl)) {
                const hc = new httpClient.HttpClient();
                return hc.getAgent(destinationUrl);
            }
        }
        return undefined;
    }
    // REST API base URL; GITHUB_API_URL overrides the default (e.g. for GHES).
    static getApiBaseUrl() {
        return process.env['GITHUB_API_URL'] || 'https://api.github.com';
    }
    // GraphQL base URL; GITHUB_GRAPHQL_URL overrides the default. The value
    // is normalized to drop a trailing slash and a trailing "/graphql".
    static getGraphQLBaseUrl() {
        let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql';
        // Shouldn't be a trailing slash, but remove if so
        if (url.endsWith('/')) {
            url = url.substr(0, url.length - 1);
        }
        // Remove trailing "/graphql"
        if (url.toUpperCase().endsWith('/GRAPHQL')) {
            url = url.substr(0, url.length - '/graphql'.length);
        }
        return url;
    }
}
| exports.GitHub = GitHub; |
| //# sourceMappingURL=github.js.map |
| |
| /***/ }), |
| |
| /***/ 470: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript helper: drives a generator-based async function, adapting it
// to a Promise of the given constructor P (defaults to the global Promise).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yielded values so they can be awaited uniformly.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator: resolve when done, otherwise await the yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript helper: adapts a CommonJS export for `import * as ns` use.
// A real ES module (flagged by __esModule) passes through untouched;
// anything else gets its own enumerable properties copied onto a fresh
// namespace object whose `default` property is the original module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) {
        Object.keys(mod).forEach(function (key) {
            result[key] = mod[key];
        });
    }
    result["default"] = mod;
    return result;
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const command_1 = __webpack_require__(431); |
| const os = __importStar(__webpack_require__(87)); |
| const path = __importStar(__webpack_require__(622)); |
| /** |
| * The code to exit an action |
| */ |
| var ExitCode; |
| (function (ExitCode) { |
| /** |
| * A code indicating that the action was successful |
| */ |
| ExitCode[ExitCode["Success"] = 0] = "Success"; |
| /** |
| * A code indicating that the action was a failure |
| */ |
| ExitCode[ExitCode["Failure"] = 1] = "Failure"; |
| })(ExitCode = exports.ExitCode || (exports.ExitCode = {})); |
| //----------------------------------------------------------------------- |
| // Variables |
| //----------------------------------------------------------------------- |
| /** |
| * Sets env variable for this action and future actions in the job |
| * @param name the name of the variable to set |
| * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify |
| */ |
| // eslint-disable-next-line @typescript-eslint/no-explicit-any |
| function exportVariable(name, val) { |
| const convertedVal = command_1.toCommandValue(val); |
| process.env[name] = convertedVal; |
| command_1.issueCommand('set-env', { name }, convertedVal); |
| } |
| exports.exportVariable = exportVariable; |
| /** |
| * Registers a secret which will get masked from logs |
| * @param secret value of the secret |
| */ |
| function setSecret(secret) { |
| command_1.issueCommand('add-mask', {}, secret); |
| } |
| exports.setSecret = setSecret; |
| /** |
| * Prepends inputPath to the PATH (for this action and future actions) |
| * @param inputPath |
| */ |
| function addPath(inputPath) { |
| command_1.issueCommand('add-path', {}, inputPath); |
| process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; |
| } |
| exports.addPath = addPath; |
| /** |
| * Gets the value of an input. The value is also trimmed. |
| * |
| * @param name name of the input to get |
| * @param options optional. See InputOptions. |
| * @returns string |
| */ |
| function getInput(name, options) { |
| const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; |
| if (options && options.required && !val) { |
| throw new Error(`Input required and not supplied: ${name}`); |
| } |
| return val.trim(); |
| } |
| exports.getInput = getInput; |
| /** |
| * Sets the value of an output. |
| * |
| * @param name name of the output to set |
| * @param value value to store. Non-string values will be converted to a string via JSON.stringify |
| */ |
| // eslint-disable-next-line @typescript-eslint/no-explicit-any |
| function setOutput(name, value) { |
| command_1.issueCommand('set-output', { name }, value); |
| } |
| exports.setOutput = setOutput; |
| /** |
| * Enables or disables the echoing of commands into stdout for the rest of the step. |
| * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. |
| * |
| */ |
| function setCommandEcho(enabled) { |
| command_1.issue('echo', enabled ? 'on' : 'off'); |
| } |
| exports.setCommandEcho = setCommandEcho; |
| //----------------------------------------------------------------------- |
| // Results |
| //----------------------------------------------------------------------- |
| /** |
| * Sets the action status to failed. |
| * When the action exits it will be with an exit code of 1 |
| * @param message add error issue message |
| */ |
| function setFailed(message) { |
| process.exitCode = ExitCode.Failure; |
| error(message); |
| } |
| exports.setFailed = setFailed; |
| //----------------------------------------------------------------------- |
| // Logging Commands |
| //----------------------------------------------------------------------- |
| /** |
| * Gets whether Actions Step Debug is on or not |
| */ |
| function isDebug() { |
| return process.env['RUNNER_DEBUG'] === '1'; |
| } |
| exports.isDebug = isDebug; |
| /** |
| * Writes debug message to user log |
| * @param message debug message |
| */ |
| function debug(message) { |
| command_1.issueCommand('debug', {}, message); |
| } |
| exports.debug = debug; |
| /** |
| * Adds an error issue |
| * @param message error issue message. Errors will be converted to string via toString() |
| */ |
| function error(message) { |
| command_1.issue('error', message instanceof Error ? message.toString() : message); |
| } |
| exports.error = error; |
| /** |
| * Adds an warning issue |
| * @param message warning issue message. Errors will be converted to string via toString() |
| */ |
| function warning(message) { |
| command_1.issue('warning', message instanceof Error ? message.toString() : message); |
| } |
| exports.warning = warning; |
| /** |
| * Writes info to log with console.log. |
| * @param message info message |
| */ |
| function info(message) { |
| process.stdout.write(message + os.EOL); |
| } |
| exports.info = info; |
| /** |
| * Begin an output group. |
| * |
| * Output until the next `groupEnd` will be foldable in this group |
| * |
| * @param name The name of the output group |
| */ |
| function startGroup(name) { |
| command_1.issue('group', name); |
| } |
| exports.startGroup = startGroup; |
| /** |
| * End an output group. |
| */ |
| function endGroup() { |
| command_1.issue('endgroup'); |
| } |
| exports.endGroup = endGroup; |
| /** |
| * Wrap an asynchronous function call in a group. |
| * |
| * Returns the same type as the function itself. |
| * |
| * @param name The name of the group |
| * @param fn The function to wrap in the group |
| */ |
| function group(name, fn) { |
| return __awaiter(this, void 0, void 0, function* () { |
| startGroup(name); |
| let result; |
| try { |
| result = yield fn(); |
| } |
| finally { |
| endGroup(); |
| } |
| return result; |
| }); |
| } |
| exports.group = group; |
| //----------------------------------------------------------------------- |
| // Wrapper action state |
| //----------------------------------------------------------------------- |
| /** |
| * Saves state for current action, the state can only be retrieved by this action's post job execution. |
| * |
| * @param name name of the state to store |
| * @param value value to store. Non-string values will be converted to a string via JSON.stringify |
| */ |
| // eslint-disable-next-line @typescript-eslint/no-explicit-any |
| function saveState(name, value) { |
| command_1.issueCommand('save-state', { name }, value); |
| } |
| exports.saveState = saveState; |
| /** |
| * Gets the value of an state set by this action's main execution. |
| * |
| * @param name name of the state to get |
| * @returns string |
| */ |
| function getState(name) { |
| return process.env[`STATE_${name}`] || ''; |
| } |
| exports.getState = getState; |
| //# sourceMappingURL=core.js.map |
| |
| /***/ }), |
| |
| /***/ 471: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticationBeforeRequest; |
| |
| const btoa = __webpack_require__(675); |
| const uniq = __webpack_require__(126); |
| |
/**
 * Request hook that injects authentication into `options` based on
 * `state.auth.type`: "basic", "token" and "app" become Authorization
 * headers; any other truthy type falls back to query-string credentials
 * (access_token, or client_id/client_secret). No-op when no auth type
 * is configured.
 */
function authenticationBeforeRequest(state, options) {
  const auth = state.auth;

  if (!auth.type) {
    return;
  }

  switch (auth.type) {
    case "basic": {
      const credentials = btoa(`${auth.username}:${auth.password}`);
      options.headers.authorization = `Basic ${credentials}`;
      return;
    }

    case "token":
      options.headers.authorization = `token ${auth.token}`;
      return;

    case "app": {
      options.headers.authorization = `Bearer ${auth.token}`;
      // GitHub App JWTs require the machine-man preview media type.
      const acceptHeaders = options.headers.accept
        .split(",")
        .concat("application/vnd.github.machine-man-preview+json");
      options.headers.accept = uniq(acceptHeaders)
        .filter(Boolean)
        .join(",");
      return;
    }
  }

  // Fallback: credentials are appended to the URL query string.
  options.url += options.url.indexOf("?") === -1 ? "?" : "&";

  if (auth.token) {
    options.url += `access_token=${encodeURIComponent(auth.token)}`;
    return;
  }

  const key = encodeURIComponent(auth.key);
  const secret = encodeURIComponent(auth.secret);
  options.url += `client_id=${key}&client_secret=${secret}`;
}
| |
| |
| /***/ }), |
| |
| /***/ 489: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const path = __webpack_require__(622); |
| const which = __webpack_require__(814); |
| const pathKey = __webpack_require__(39)(); |
| |
// Attempts to resolve `parsed.command` to an absolute executable path with
// `which.sync`, honoring a custom `parsed.options.cwd`. When `withoutPathExt`
// is true, PATHEXT matching is suppressed by passing a pathExt with no real
// extensions. Returns undefined when the command cannot be resolved.
function resolveCommandAttempt(parsed, withoutPathExt) {
    const cwd = process.cwd();
    const hasCustomCwd = parsed.options.cwd != null;

    // If a custom `cwd` was specified, we need to change the process cwd
    // because `which` will do stat calls but does not support a custom cwd
    if (hasCustomCwd) {
        try {
            process.chdir(parsed.options.cwd);
        } catch (err) {
            /* Empty */
        }
    }

    let resolved;

    try {
        resolved = which.sync(parsed.command, {
            path: (parsed.options.env || process.env)[pathKey],
            pathExt: withoutPathExt ? path.delimiter : undefined,
        });
    } catch (e) {
        /* Empty */
    } finally {
        // NOTE(review): the original cwd is restored unconditionally, even when
        // it was never changed — harmless, but intentional-looking; confirm
        // before "optimizing" this away.
        process.chdir(cwd);
    }

    // If we successfully resolved, ensure that an absolute path is returned
    // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
    if (resolved) {
        resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
    }

    return resolved;
}
| |
// Resolve a command, first with PATHEXT matching, then without it as a fallback.
function resolveCommand(parsed) {
    const withPathExt = resolveCommandAttempt(parsed);
    return withPathExt || resolveCommandAttempt(parsed, true);
}
| |
| module.exports = resolveCommand; |
| |
| |
| /***/ }), |
| |
| /***/ 497: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } |
| |
| var deprecation = __webpack_require__(692); |
| var once = _interopDefault(__webpack_require__(969)); |
| |
| const logOnce = once(deprecation => console.warn(deprecation)); |
| /** |
| * Error with extra properties to help with debugging |
| */ |
| |
| class RequestError extends Error { |
| constructor(message, statusCode, options) { |
| super(message); // Maintains proper stack trace (only available on V8) |
| |
| /* istanbul ignore next */ |
| |
| if (Error.captureStackTrace) { |
| Error.captureStackTrace(this, this.constructor); |
| } |
| |
| this.name = "HttpError"; |
| this.status = statusCode; |
| Object.defineProperty(this, "code", { |
| get() { |
| logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); |
| return statusCode; |
| } |
| |
| }); |
| this.headers = options.headers || {}; // redact request credentials without mutating original request options |
| |
| const requestCopy = Object.assign({}, options.request); |
| |
| if (options.request.headers.authorization) { |
| requestCopy.headers = Object.assign({}, options.request.headers, { |
| authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") |
| }); |
| } |
| |
| requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit |
| // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications |
| .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended |
| // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header |
| .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); |
| this.request = requestCopy; |
| } |
| |
| } |
| |
| exports.RequestError = RequestError; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 510: |
| /***/ (function(module) { |
| |
| module.exports = addHook |
| |
// Register `hook` under `name` in `state.registry`, wrapping it according to
// `kind`: 'before' runs it before the method, 'after' runs it with the
// method's result (preserving that result as the resolved value), 'error'
// runs it only when the method rejects. Unknown kinds are stored unwrapped.
function addHook (state, kind, name, hook) {
  const orig = hook
  if (!state.registry[name]) {
    state.registry[name] = []
  }

  switch (kind) {
    case 'before':
      hook = function (method, options) {
        return Promise.resolve()
          .then(orig.bind(null, options))
          .then(method.bind(null, options))
      }
      break

    case 'after':
      hook = function (method, options) {
        let result
        return Promise.resolve()
          .then(method.bind(null, options))
          .then(function (value) {
            result = value
            return orig(result, options)
          })
          .then(function () {
            // Resolve with the method's result, not the after-hook's.
            return result
          })
      }
      break

    case 'error':
      hook = function (method, options) {
        return Promise.resolve()
          .then(method.bind(null, options))
          .catch(function (error) {
            return orig(error, options)
          })
      }
      break
  }

  state.registry[name].push({ hook: hook, orig: orig })
}
| |
| |
| /***/ }), |
| |
| /***/ 523: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var register = __webpack_require__(280) |
| var addHook = __webpack_require__(510) |
| var removeHook = __webpack_require__(763) |
| |
| // bind with array of arguments: https://stackoverflow.com/a/21792913 |
| var bind = Function.bind |
| var bindable = bind.bind(bind) |
| |
// Attach the public hook API (remove + before/error/after/wrap registration)
// onto `hook`, pre-bound to `state` (and `name`, when given — the singular case).
function bindApi (hook, state, name) {
  const removeArgs = name ? [state, name] : [state]
  const removeHookRef = bindable(removeHook, null).apply(null, removeArgs)
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  for (const kind of ['before', 'error', 'after', 'wrap']) {
    const addArgs = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, addArgs)
  }
}
| |
// Build a single-hook instance: one registry slot under the fixed name 'h'.
function HookSingular () {
  const hookName = 'h'
  const hookState = {
    registry: {}
  }
  const singularHook = register.bind(null, hookState, hookName)
  bindApi(singularHook, hookState, hookName)
  return singularHook
}
| |
// Build a hook collection: callers supply the hook name per registration.
function HookCollection () {
  const collectionState = {
    registry: {}
  }
  const hook = register.bind(null, collectionState)
  bindApi(hook, collectionState)
  return hook
}
| |
var collectionHookDeprecationMessageDisplayed = false
// Deprecated entry point kept for backwards compatibility; warns once and
// delegates to HookCollection().
function Hook () {
  if (collectionHookDeprecationMessageDisplayed === false) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}
| |
// `.bind()` with no arguments creates detached copies, so consumers can call
// `Hook.Singular()` / `Hook.Collection()` without caring about `this`.
Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
| |
| |
| /***/ }), |
| |
| /***/ 529: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| const factory = __webpack_require__(47); |
| |
| module.exports = factory(); |
| |
| |
| /***/ }), |
| |
| /***/ 536: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = hasFirstPage |
| |
| const deprecate = __webpack_require__(370) |
| const getPageLinks = __webpack_require__(577) |
| |
// Deprecated pagination helper: returns the "first" page link (if any) from a
// Link header, after emitting a deprecation notice.
function hasFirstPage (link) {
  deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  const { first } = getPageLinks(link)
  return first
}
| |
| |
| /***/ }), |
| |
| /***/ 537: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const utils = __webpack_require__(224); |
| const { |
| CHAR_ASTERISK, /* * */ |
| CHAR_AT, /* @ */ |
| CHAR_BACKWARD_SLASH, /* \ */ |
| CHAR_COMMA, /* , */ |
| CHAR_DOT, /* . */ |
| CHAR_EXCLAMATION_MARK, /* ! */ |
| CHAR_FORWARD_SLASH, /* / */ |
| CHAR_LEFT_CURLY_BRACE, /* { */ |
| CHAR_LEFT_PARENTHESES, /* ( */ |
| CHAR_LEFT_SQUARE_BRACKET, /* [ */ |
| CHAR_PLUS, /* + */ |
| CHAR_QUESTION_MARK, /* ? */ |
| CHAR_RIGHT_CURLY_BRACE, /* } */ |
| CHAR_RIGHT_PARENTHESES, /* ) */ |
| CHAR_RIGHT_SQUARE_BRACKET /* ] */ |
| } = __webpack_require__(199); |
| |
// True for both `/` and `\` character codes.
const isPathSeparator = code => code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
| |
// Assign a token's path depth: globstars match any depth (Infinity),
// everything else counts as one segment. Prefix tokens are left untouched.
const depth = token => {
  if (token.isPrefix === true) return;
  token.depth = token.isGlobstar ? Infinity : 1;
};
| |
| /** |
| * Quickly scans a glob pattern and returns an object with a handful of |
| * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), |
| * `glob` (the actual pattern), and `negated` (true if the path starts with `!`). |
| * |
| * ```js |
| * const pm = require('picomatch'); |
| * console.log(pm.scan('foo/bar/*.js')); |
| * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } |
| * ``` |
| * @param {String} `str` |
| * @param {Object} `options` |
| * @return {Object} Returns an object with tokens and regex source string. |
| * @api public |
| */ |
| |
| const scan = (input, options) => { |
| const opts = options || {}; |
| |
| const length = input.length - 1; |
| const scanToEnd = opts.parts === true || opts.scanToEnd === true; |
| const slashes = []; |
| const tokens = []; |
| const parts = []; |
| |
| let str = input; |
| let index = -1; |
| let start = 0; |
| let lastIndex = 0; |
| let isBrace = false; |
| let isBracket = false; |
| let isGlob = false; |
| let isExtglob = false; |
| let isGlobstar = false; |
| let braceEscaped = false; |
| let backslashes = false; |
| let negated = false; |
| let finished = false; |
| let braces = 0; |
| let prev; |
| let code; |
| let token = { value: '', depth: 0, isGlob: false }; |
| |
| const eos = () => index >= length; |
| const peek = () => str.charCodeAt(index + 1); |
| const advance = () => { |
| prev = code; |
| return str.charCodeAt(++index); |
| }; |
| |
| while (index < length) { |
| code = advance(); |
| let next; |
| |
| if (code === CHAR_BACKWARD_SLASH) { |
| backslashes = token.backslashes = true; |
| code = advance(); |
| |
| if (code === CHAR_LEFT_CURLY_BRACE) { |
| braceEscaped = true; |
| } |
| continue; |
| } |
| |
| if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { |
| braces++; |
| |
| while (eos() !== true && (code = advance())) { |
| if (code === CHAR_BACKWARD_SLASH) { |
| backslashes = token.backslashes = true; |
| advance(); |
| continue; |
| } |
| |
| if (code === CHAR_LEFT_CURLY_BRACE) { |
| braces++; |
| continue; |
| } |
| |
| if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { |
| isBrace = token.isBrace = true; |
| isGlob = token.isGlob = true; |
| finished = true; |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| |
| break; |
| } |
| |
| if (braceEscaped !== true && code === CHAR_COMMA) { |
| isBrace = token.isBrace = true; |
| isGlob = token.isGlob = true; |
| finished = true; |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| |
| break; |
| } |
| |
| if (code === CHAR_RIGHT_CURLY_BRACE) { |
| braces--; |
| |
| if (braces === 0) { |
| braceEscaped = false; |
| isBrace = token.isBrace = true; |
| finished = true; |
| break; |
| } |
| } |
| } |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| |
| break; |
| } |
| |
| if (code === CHAR_FORWARD_SLASH) { |
| slashes.push(index); |
| tokens.push(token); |
| token = { value: '', depth: 0, isGlob: false }; |
| |
| if (finished === true) continue; |
| if (prev === CHAR_DOT && index === (start + 1)) { |
| start += 2; |
| continue; |
| } |
| |
| lastIndex = index + 1; |
| continue; |
| } |
| |
| if (opts.noext !== true) { |
| const isExtglobChar = code === CHAR_PLUS |
| || code === CHAR_AT |
| || code === CHAR_ASTERISK |
| || code === CHAR_QUESTION_MARK |
| || code === CHAR_EXCLAMATION_MARK; |
| |
| if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { |
| isGlob = token.isGlob = true; |
| isExtglob = token.isExtglob = true; |
| finished = true; |
| |
| if (scanToEnd === true) { |
| while (eos() !== true && (code = advance())) { |
| if (code === CHAR_BACKWARD_SLASH) { |
| backslashes = token.backslashes = true; |
| code = advance(); |
| continue; |
| } |
| |
| if (code === CHAR_RIGHT_PARENTHESES) { |
| isGlob = token.isGlob = true; |
| finished = true; |
| break; |
| } |
| } |
| continue; |
| } |
| break; |
| } |
| } |
| |
| if (code === CHAR_ASTERISK) { |
| if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; |
| isGlob = token.isGlob = true; |
| finished = true; |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| break; |
| } |
| |
| if (code === CHAR_QUESTION_MARK) { |
| isGlob = token.isGlob = true; |
| finished = true; |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| break; |
| } |
| |
| if (code === CHAR_LEFT_SQUARE_BRACKET) { |
| while (eos() !== true && (next = advance())) { |
| if (next === CHAR_BACKWARD_SLASH) { |
| backslashes = token.backslashes = true; |
| advance(); |
| continue; |
| } |
| |
| if (next === CHAR_RIGHT_SQUARE_BRACKET) { |
| isBracket = token.isBracket = true; |
| isGlob = token.isGlob = true; |
| finished = true; |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| break; |
| } |
| } |
| } |
| |
| if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { |
| negated = token.negated = true; |
| start++; |
| continue; |
| } |
| |
| if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { |
| isGlob = token.isGlob = true; |
| |
| if (scanToEnd === true) { |
| while (eos() !== true && (code = advance())) { |
| if (code === CHAR_LEFT_PARENTHESES) { |
| backslashes = token.backslashes = true; |
| code = advance(); |
| continue; |
| } |
| |
| if (code === CHAR_RIGHT_PARENTHESES) { |
| finished = true; |
| break; |
| } |
| } |
| continue; |
| } |
| break; |
| } |
| |
| if (isGlob === true) { |
| finished = true; |
| |
| if (scanToEnd === true) { |
| continue; |
| } |
| |
| break; |
| } |
| } |
| |
| if (opts.noext === true) { |
| isExtglob = false; |
| isGlob = false; |
| } |
| |
| let base = str; |
| let prefix = ''; |
| let glob = ''; |
| |
| if (start > 0) { |
| prefix = str.slice(0, start); |
| str = str.slice(start); |
| lastIndex -= start; |
| } |
| |
| if (base && isGlob === true && lastIndex > 0) { |
| base = str.slice(0, lastIndex); |
| glob = str.slice(lastIndex); |
| } else if (isGlob === true) { |
| base = ''; |
| glob = str; |
| } else { |
| base = str; |
| } |
| |
| if (base && base !== '' && base !== '/' && base !== str) { |
| if (isPathSeparator(base.charCodeAt(base.length - 1))) { |
| base = base.slice(0, -1); |
| } |
| } |
| |
| if (opts.unescape === true) { |
| if (glob) glob = utils.removeBackslashes(glob); |
| |
| if (base && backslashes === true) { |
| base = utils.removeBackslashes(base); |
| } |
| } |
| |
| const state = { |
| prefix, |
| input, |
| start, |
| base, |
| glob, |
| isBrace, |
| isBracket, |
| isGlob, |
| isExtglob, |
| isGlobstar, |
| negated |
| }; |
| |
| if (opts.tokens === true) { |
| state.maxDepth = 0; |
| if (!isPathSeparator(code)) { |
| tokens.push(token); |
| } |
| state.tokens = tokens; |
| } |
| |
| if (opts.parts === true || opts.tokens === true) { |
| let prevIndex; |
| |
| for (let idx = 0; idx < slashes.length; idx++) { |
| const n = prevIndex ? prevIndex + 1 : start; |
| const i = slashes[idx]; |
| const value = input.slice(n, i); |
| if (opts.tokens) { |
| if (idx === 0 && start !== 0) { |
| tokens[idx].isPrefix = true; |
| tokens[idx].value = prefix; |
| } else { |
| tokens[idx].value = value; |
| } |
| depth(tokens[idx]); |
| state.maxDepth += tokens[idx].depth; |
| } |
| if (idx !== 0 || value !== '') { |
| parts.push(value); |
| } |
| prevIndex = i; |
| } |
| |
| if (prevIndex && prevIndex + 1 < input.length) { |
| const value = input.slice(prevIndex + 1); |
| parts.push(value); |
| |
| if (opts.tokens) { |
| tokens[tokens.length - 1].value = value; |
| depth(tokens[tokens.length - 1]); |
| state.maxDepth += tokens[tokens.length - 1].depth; |
| } |
| } |
| |
| state.slashes = slashes; |
| state.parts = parts; |
| } |
| |
| return state; |
| }; |
| |
| module.exports = scan; |
| |
| |
| /***/ }), |
| |
| /***/ 539: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const url = __webpack_require__(835); |
| const http = __webpack_require__(605); |
| const https = __webpack_require__(211); |
| const pm = __webpack_require__(950); |
| let tunnel; |
var HttpCodes;
// TypeScript-compiled enum: builds a two-way name <-> numeric-status map
// and publishes it as `exports.HttpCodes`.
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
// String enum of the HTTP header names this client sets/reads.
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
// String enum of media types used for the JSON helper methods.
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
| /** |
| * Returns the proxy URL, depending upon the supplied url and proxy environment variables. |
| * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com |
| */ |
| function getProxyUrl(serverUrl) { |
| let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); |
| return proxyUrl ? proxyUrl.href : ''; |
| } |
| exports.getProxyUrl = getProxyUrl; |
// Status codes that trigger automatic redirect following in HttpClient.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Status codes considered transient and eligible for retry.
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only read-style verbs are retried, since writes may not be idempotent.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Exponential backoff tuning constants; used by retry logic outside this
// chunk — units not visible here, TODO confirm before changing.
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
/**
 * Thin wrapper around an incoming HTTP response message that buffers the
 * body on demand.
 */
class HttpClientResponse {
    /**
     * @param message the underlying response stream (emits 'data'/'end')
     */
    constructor(message) {
        this.message = message;
    }
    /**
     * Reads the entire response body and resolves it as a string.
     * @returns Promise<string> the concatenated body
     */
    readBody() {
        // Fix: the original used `new Promise(async (resolve, reject) => ...)`.
        // The executor never awaited anything and `reject` was unused, and an
        // exception thrown inside an async executor becomes an unhandled
        // rejection instead of rejecting the returned promise. A plain
        // executor has identical behavior for callers without that hazard.
        return new Promise((resolve) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
| exports.HttpClientResponse = HttpClientResponse; |
/**
 * Determines whether a request URL uses the https: protocol.
 * @param requestUrl URL string to inspect
 * @returns true when the parsed protocol is 'https:'
 */
function isHttps(requestUrl) {
    const parsed = url.parse(requestUrl);
    return parsed.protocol === 'https:';
}
| exports.isHttps = isHttps; |
| class HttpClient { |
| constructor(userAgent, handlers, requestOptions) { |
| this._ignoreSslError = false; |
| this._allowRedirects = true; |
| this._allowRedirectDowngrade = false; |
| this._maxRedirects = 50; |
| this._allowRetries = false; |
| this._maxRetries = 1; |
| this._keepAlive = false; |
| this._disposed = false; |
| this.userAgent = userAgent; |
| this.handlers = handlers || []; |
| this.requestOptions = requestOptions; |
| if (requestOptions) { |
| if (requestOptions.ignoreSslError != null) { |
| this._ignoreSslError = requestOptions.ignoreSslError; |
| } |
| this._socketTimeout = requestOptions.socketTimeout; |
| if (requestOptions.allowRedirects != null) { |
| this._allowRedirects = requestOptions.allowRedirects; |
| } |
| if (requestOptions.allowRedirectDowngrade != null) { |
| this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; |
| } |
| if (requestOptions.maxRedirects != null) { |
| this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); |
| } |
| if (requestOptions.keepAlive != null) { |
| this._keepAlive = requestOptions.keepAlive; |
| } |
| if (requestOptions.allowRetries != null) { |
| this._allowRetries = requestOptions.allowRetries; |
| } |
| if (requestOptions.maxRetries != null) { |
| this._maxRetries = requestOptions.maxRetries; |
| } |
| } |
| } |
| options(requestUrl, additionalHeaders) { |
| return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); |
| } |
| get(requestUrl, additionalHeaders) { |
| return this.request('GET', requestUrl, null, additionalHeaders || {}); |
| } |
| del(requestUrl, additionalHeaders) { |
| return this.request('DELETE', requestUrl, null, additionalHeaders || {}); |
| } |
| post(requestUrl, data, additionalHeaders) { |
| return this.request('POST', requestUrl, data, additionalHeaders || {}); |
| } |
| patch(requestUrl, data, additionalHeaders) { |
| return this.request('PATCH', requestUrl, data, additionalHeaders || {}); |
| } |
| put(requestUrl, data, additionalHeaders) { |
| return this.request('PUT', requestUrl, data, additionalHeaders || {}); |
| } |
| head(requestUrl, additionalHeaders) { |
| return this.request('HEAD', requestUrl, null, additionalHeaders || {}); |
| } |
| sendStream(verb, requestUrl, stream, additionalHeaders) { |
| return this.request(verb, requestUrl, stream, additionalHeaders); |
| } |
| /** |
| * Gets a typed object from an endpoint |
| * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise |
| */ |
| async getJson(requestUrl, additionalHeaders = {}) { |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| let res = await this.get(requestUrl, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| async postJson(requestUrl, obj, additionalHeaders = {}) { |
| let data = JSON.stringify(obj, null, 2); |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); |
| let res = await this.post(requestUrl, data, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| async putJson(requestUrl, obj, additionalHeaders = {}) { |
| let data = JSON.stringify(obj, null, 2); |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); |
| let res = await this.put(requestUrl, data, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| async patchJson(requestUrl, obj, additionalHeaders = {}) { |
| let data = JSON.stringify(obj, null, 2); |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); |
| let res = await this.patch(requestUrl, data, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| /** |
| * Makes a raw http request. |
| * All other methods such as get, post, patch, and request ultimately call this. |
| * Prefer get, del, post and patch |
| */ |
| async request(verb, requestUrl, data, headers) { |
| if (this._disposed) { |
| throw new Error('Client has already been disposed.'); |
| } |
| let parsedUrl = url.parse(requestUrl); |
| let info = this._prepareRequest(verb, parsedUrl, headers); |
| // Only perform retries on reads since writes may not be idempotent. |
| let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 |
| ? this._maxRetries + 1 |
| : 1; |
| let numTries = 0; |
| let response; |
| while (numTries < maxTries) { |
| response = await this.requestRaw(info, data); |
| // Check if it's an authentication challenge |
| if (response && |
| response.message && |
| response.message.statusCode === HttpCodes.Unauthorized) { |
| let authenticationHandler; |
| for (let i = 0; i < this.handlers.length; i++) { |
| if (this.handlers[i].canHandleAuthentication(response)) { |
| authenticationHandler = this.handlers[i]; |
| break; |
| } |
| } |
| if (authenticationHandler) { |
| return authenticationHandler.handleAuthentication(this, info, data); |
| } |
| else { |
| // We have received an unauthorized response but have no handlers to handle it. |
| // Let the response return to the caller. |
| return response; |
| } |
| } |
| let redirectsRemaining = this._maxRedirects; |
| while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && |
| this._allowRedirects && |
| redirectsRemaining > 0) { |
| const redirectUrl = response.message.headers['location']; |
| if (!redirectUrl) { |
| // if there's no location to redirect to, we won't |
| break; |
| } |
| let parsedRedirectUrl = url.parse(redirectUrl); |
| if (parsedUrl.protocol == 'https:' && |
| parsedUrl.protocol != parsedRedirectUrl.protocol && |
| !this._allowRedirectDowngrade) { |
| throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); |
| } |
| // we need to finish reading the response before reassigning response |
| // which will leak the open socket. |
| await response.readBody(); |
| // strip authorization header if redirected to a different hostname |
| if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { |
| for (let header in headers) { |
| // header names are case insensitive |
| if (header.toLowerCase() === 'authorization') { |
| delete headers[header]; |
| } |
| } |
| } |
| // let's make the request with the new redirectUrl |
| info = this._prepareRequest(verb, parsedRedirectUrl, headers); |
| response = await this.requestRaw(info, data); |
| redirectsRemaining--; |
| } |
| if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { |
| // If not a retry code, return immediately instead of retrying |
| return response; |
| } |
| numTries += 1; |
| if (numTries < maxTries) { |
| await response.readBody(); |
| await this._performExponentialBackoff(numTries); |
| } |
| } |
| return response; |
| } |
| /** |
| * Needs to be called if keepAlive is set to true in request options. |
| */ |
| dispose() { |
| if (this._agent) { |
| this._agent.destroy(); |
| } |
| this._disposed = true; |
| } |
| /** |
| * Raw request. |
| * @param info |
| * @param data |
| */ |
| requestRaw(info, data) { |
| return new Promise((resolve, reject) => { |
| let callbackForResult = function (err, res) { |
| if (err) { |
| reject(err); |
| } |
| resolve(res); |
| }; |
| this.requestRawWithCallback(info, data, callbackForResult); |
| }); |
| } |
| /** |
| * Raw request with callback. |
| * @param info |
| * @param data |
| * @param onResult |
| */ |
| requestRawWithCallback(info, data, onResult) { |
| let socket; |
| if (typeof data === 'string') { |
| info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); |
| } |
| let callbackCalled = false; |
| let handleResult = (err, res) => { |
| if (!callbackCalled) { |
| callbackCalled = true; |
| onResult(err, res); |
| } |
| }; |
| let req = info.httpModule.request(info.options, (msg) => { |
| let res = new HttpClientResponse(msg); |
| handleResult(null, res); |
| }); |
| req.on('socket', sock => { |
| socket = sock; |
| }); |
| // If we ever get disconnected, we want the socket to timeout eventually |
| req.setTimeout(this._socketTimeout || 3 * 60000, () => { |
| if (socket) { |
| socket.end(); |
| } |
| handleResult(new Error('Request timeout: ' + info.options.path), null); |
| }); |
| req.on('error', function (err) { |
| // err has statusCode property |
| // res should have headers |
| handleResult(err, null); |
| }); |
| if (data && typeof data === 'string') { |
| req.write(data, 'utf8'); |
| } |
| if (data && typeof data !== 'string') { |
| data.on('close', function () { |
| req.end(); |
| }); |
| data.pipe(req); |
| } |
| else { |
| req.end(); |
| } |
| } |
| /** |
| * Gets an http agent. This function is useful when you need an http agent that handles |
| * routing through a proxy server - depending upon the url and proxy environment variables. |
| * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com |
| */ |
| getAgent(serverUrl) { |
| let parsedUrl = url.parse(serverUrl); |
| return this._getAgent(parsedUrl); |
| } |
/**
 * Builds the request info object consumed by requestRaw(): picks the
 * http/https module, derives host/port/path from the parsed URL, merges
 * headers over client defaults, and attaches the appropriate agent.
 * @param method HTTP verb
 * @param requestUrl an already-parsed URL object (from url.parse)
 * @param headers per-request headers
 * @returns info object: { parsedUrl, httpModule, options }
 */
_prepareRequest(method, requestUrl, headers) {
    const info = {};
    info.parsedUrl = requestUrl;
    const usingSsl = info.parsedUrl.protocol === 'https:';
    info.httpModule = usingSsl ? https : http;
    const defaultPort = usingSsl ? 443 : 80;
    info.options = {};
    info.options.host = info.parsedUrl.hostname;
    info.options.port = info.parsedUrl.port
        ? parseInt(info.parsedUrl.port)
        : defaultPort;
    info.options.path =
        (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
    info.options.method = method;
    info.options.headers = this._mergeHeaders(headers);
    if (this.userAgent != null) {
        info.options.headers['user-agent'] = this.userAgent;
    }
    info.options.agent = this._getAgent(info.parsedUrl);
    // gives handlers an opportunity to participate
    if (this.handlers) {
        this.handlers.forEach(handler => {
            handler.prepareRequest(info.options);
        });
    }
    return info;
}
| _mergeHeaders(headers) { |
| const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); |
| if (this.requestOptions && this.requestOptions.headers) { |
| return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); |
| } |
| return lowercaseKeys(headers || {}); |
| } |
| _getExistingOrDefaultHeader(additionalHeaders, header, _default) { |
| const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); |
| let clientHeader; |
| if (this.requestOptions && this.requestOptions.headers) { |
| clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; |
| } |
| return additionalHeaders[header] || clientHeader || _default; |
| } |
/**
 * Returns the agent to use for a URL, honoring keep-alive and proxy
 * settings. Caches one tunnel (proxy) agent and one plain agent on the
 * instance when keep-alive is enabled; otherwise falls through to the
 * global http/https agent.
 */
_getAgent(parsedUrl) {
    let agent;
    let proxyUrl = pm.getProxyUrl(parsedUrl);
    let useProxy = proxyUrl && proxyUrl.hostname;
    // Reuse a previously-built agent when keep-alive is on.
    if (this._keepAlive && useProxy) {
        agent = this._proxyAgent;
    }
    if (this._keepAlive && !useProxy) {
        agent = this._agent;
    }
    // if agent is already assigned use that agent.
    if (!!agent) {
        return agent;
    }
    const usingSsl = parsedUrl.protocol === 'https:';
    let maxSockets = 100;
    if (!!this.requestOptions) {
        maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
    }
    if (useProxy) {
        // If using proxy, need tunnel
        if (!tunnel) {
            tunnel = __webpack_require__(856);
        }
        const agentOptions = {
            maxSockets: maxSockets,
            keepAlive: this._keepAlive,
            proxy: {
                proxyAuth: proxyUrl.auth,
                host: proxyUrl.hostname,
                port: proxyUrl.port
            }
        };
        // Tunnel variant is selected by (target scheme) x (proxy scheme).
        let tunnelAgent;
        const overHttps = proxyUrl.protocol === 'https:';
        if (usingSsl) {
            tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
        }
        else {
            tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
        }
        agent = tunnelAgent(agentOptions);
        this._proxyAgent = agent;
    }
    // if reusing agent across request and tunneling agent isn't assigned create a new agent
    if (this._keepAlive && !agent) {
        const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
        agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
        this._agent = agent;
    }
    // if not using private agent and tunnel agent isn't setup then use global agent
    if (!agent) {
        agent = usingSsl ? https.globalAgent : http.globalAgent;
    }
    if (usingSsl && this._ignoreSslError) {
        // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
        // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
        // we have to cast it to any and change it directly
        agent.options = Object.assign(agent.options || {}, {
            rejectUnauthorized: false
        });
    }
    return agent;
}
| _performExponentialBackoff(retryNumber) { |
| retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); |
| const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); |
| return new Promise(resolve => setTimeout(() => resolve(), ms)); |
| } |
| static dateTimeDeserializer(key, value) { |
| if (typeof value === 'string') { |
| let a = new Date(value); |
| if (!isNaN(a.valueOf())) { |
| return a; |
| } |
| } |
| return value; |
| } |
/**
 * Reads the response body, JSON-parses it into `result`, and resolves
 * { statusCode, result, headers }. Status > 299 rejects with an Error
 * carrying `statusCode` (and `result` when the body parsed). A 404
 * resolves early with a null result.
 */
async _processResponse(res, options) {
    return new Promise(async (resolve, reject) => {
        const statusCode = res.message.statusCode;
        const response = {
            statusCode: statusCode,
            result: null,
            headers: {}
        };
        // not found leads to null obj returned
        // NOTE(review): there is no `return` after resolve(), so execution
        // continues: the body is still drained below, and the later
        // resolve/reject are no-ops since the promise already settled.
        // Looks intentional (keeps the socket reusable) — confirm.
        if (statusCode == HttpCodes.NotFound) {
            resolve(response);
        }
        let obj;
        let contents;
        // get the result from the body
        try {
            contents = await res.readBody();
            if (contents && contents.length > 0) {
                if (options && options.deserializeDates) {
                    obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                }
                else {
                    obj = JSON.parse(contents);
                }
                response.result = obj;
            }
            response.headers = res.message.headers;
        }
        catch (err) {
            // Invalid resource (contents not json); leaving result obj null
        }
        // note that 3xx redirects are handled by the http layer.
        if (statusCode > 299) {
            let msg;
            // if exception/error in body, attempt to get better error
            if (obj && obj.message) {
                msg = obj.message;
            }
            else if (contents && contents.length > 0) {
                // it may be the case that the exception is in the body message as string
                msg = contents;
            }
            else {
                msg = 'Failed request: (' + statusCode + ')';
            }
            let err = new Error(msg);
            // attach statusCode and body obj (if available) to the error object
            err['statusCode'] = statusCode;
            if (response.result) {
                err['result'] = response.result;
            }
            reject(err);
        }
        else {
            resolve(response);
        }
    });
}
| } |
| exports.HttpClient = HttpClient; |
| |
| |
| /***/ }), |
| |
| /***/ 548: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| /*! |
| * isobject <https://github.com/jonschlinkert/isobject> |
| * |
| * Copyright (c) 2014-2017, Jon Schlinkert. |
| * Released under the MIT License. |
| */ |
| |
| function isObject(val) { |
| return val != null && typeof val === 'object' && Array.isArray(val) === false; |
| } |
| |
| /*! |
| * is-plain-object <https://github.com/jonschlinkert/is-plain-object> |
| * |
| * Copyright (c) 2014-2017, Jon Schlinkert. |
| * Released under the MIT License. |
| */ |
| |
| function isObjectObject(o) { |
| return isObject(o) === true |
| && Object.prototype.toString.call(o) === '[object Object]'; |
| } |
| |
| function isPlainObject(o) { |
| var ctor,prot; |
| |
| if (isObjectObject(o) === false) return false; |
| |
| // If has modified constructor |
| ctor = o.constructor; |
| if (typeof ctor !== 'function') return false; |
| |
| // If has modified prototype |
| prot = ctor.prototype; |
| if (isObjectObject(prot) === false) return false; |
| |
| // If constructor does not have an Object-specific method |
| if (prot.hasOwnProperty('isPrototypeOf') === false) { |
| return false; |
| } |
| |
| // Most likely a plain Object |
| return true; |
| } |
| |
| module.exports = isPlainObject; |
| |
| |
| /***/ }), |
| |
| /***/ 550: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = getNextPage |
| |
| const getPage = __webpack_require__(265) |
| |
// Fetches the page referenced by rel="next" in the response's Link header.
function getNextPage (octokit, link, headers) {
  return getPage(octokit, link, 'next', headers)
}
| |
| |
| /***/ }), |
| |
| /***/ 556: |
| /***/ (function(module) { |
| |
| "use strict"; |
| // YAML error class. http://stackoverflow.com/questions/8458984 |
| // |
| |
| |
/**
 * Error subclass used to report YAML parse/dump problems.
 * @param {string} [reason] human-readable description of the problem
 * @param {Object} [mark] position marker; its toString() is appended to message
 */
function YAMLException(reason, mark) {
  // Super constructor
  Error.call(this);

  this.name = 'YAMLException';
  this.reason = reason;
  this.mark = mark;
  this.message = (this.reason || '(unknown reason)') + (this.mark ? ' ' + this.mark.toString() : '');

  // Include stack trace in error object
  if (Error.captureStackTrace) {
    // Chrome and NodeJS
    Error.captureStackTrace(this, this.constructor);
  } else {
    // FF, IE 10+ and Safari 6+. Fallback for others
    this.stack = (new Error()).stack || '';
  }
}
| |
| |
| // Inherit from Error |
| YAMLException.prototype = Object.create(Error.prototype); |
| YAMLException.prototype.constructor = YAMLException; |
| |
| |
// Renders "Name: reason [mark]"; `compact` suppresses the mark suffix.
YAMLException.prototype.toString = function toString(compact) {
  var parts = [this.name + ':', this.reason || '(unknown reason)'];

  if (!compact && this.mark) {
    parts.push(this.mark.toString());
  }

  return parts.join(' ');
};
| |
| |
| module.exports = YAMLException; |
| |
| |
| /***/ }), |
| |
| /***/ 558: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = hasPreviousPage |
| |
| const deprecate = __webpack_require__(370) |
| const getPageLinks = __webpack_require__(577) |
| |
// Deprecated: returns the rel="prev" URL (truthy) when a previous page
// exists, undefined otherwise. Logs a deprecation warning on every call.
function hasPreviousPage (link) {
  deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  return getPageLinks(link).prev
}
| |
| |
| /***/ }), |
| |
| /***/ 563: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = getPreviousPage |
| |
| const getPage = __webpack_require__(265) |
| |
// Fetches the page referenced by rel="prev" in the response's Link header.
function getPreviousPage (octokit, link, headers) {
  return getPage(octokit, link, 'prev', headers)
}
| |
| |
| /***/ }), |
| |
| /***/ 568: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const path = __webpack_require__(622); |
| const niceTry = __webpack_require__(948); |
| const resolveCommand = __webpack_require__(489); |
| const escape = __webpack_require__(462); |
| const readShebang = __webpack_require__(389); |
| const semver = __webpack_require__(48); |
| |
| const isWin = process.platform === 'win32'; |
| const isExecutableRegExp = /\.(?:com|exe)$/i; |
| const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; |
| |
| // `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 |
| const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; |
| |
// Resolves `parsed.command` to a file path; if that file begins with a
// shebang, the shebang interpreter becomes the command, the original file
// is pushed onto the front of the args, and resolution runs again.
// Returns the resolved executable path (or undefined if not resolvable).
function detectShebang(parsed) {
    parsed.file = resolveCommand(parsed);

    const shebang = parsed.file && readShebang(parsed.file);

    if (shebang) {
        parsed.args.unshift(parsed.file);
        parsed.command = shebang;

        return resolveCommand(parsed);
    }

    return parsed.file;
}
| |
// Windows-only preparation for spawning without the `shell` option:
// wraps the command in `cmd.exe /d /s /c "..."` (with metachar escaping)
// unless the resolved file is a .com/.exe that can be spawned directly.
// No-op on POSIX platforms.
function parseNonShell(parsed) {
    if (!isWin) {
        return parsed;
    }

    // Detect & add support for shebangs
    const commandFile = detectShebang(parsed);

    // We don't need a shell if the command filename is an executable
    const needsShell = !isExecutableRegExp.test(commandFile);

    // If a shell is required, use cmd.exe and take care of escaping everything correctly
    // Note that `forceShell` is an hidden option used only in tests
    if (parsed.options.forceShell || needsShell) {
        // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
        // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
        // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
        // we need to double escape them
        const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

        // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
        // This is necessary otherwise it will always fail with ENOENT in those cases
        parsed.command = path.normalize(parsed.command);

        // Escape command & arguments
        parsed.command = escape.command(parsed.command);
        parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

        const shellCommand = [parsed.command].concat(parsed.args).join(' ');

        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.command = process.env.comspec || 'cmd.exe';
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    }

    return parsed;
}
| |
// Fallback for Node versions lacking the `shell` spawn option: rebuilds
// the command line as `cmd.exe /d /s /c "..."` on Windows or `sh -c '...'`
// on POSIX (`/system/bin/sh` on Android). No-op when Node supports `shell`.
function parseShell(parsed) {
    // If node supports the shell option, there's no need to mimic its behavior
    if (supportsShellOption) {
        return parsed;
    }

    // Mimic node shell option
    // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
    const shellCommand = [parsed.command].concat(parsed.args).join(' ');

    if (isWin) {
        parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    } else {
        // Honor an explicit string `shell` option; otherwise pick the platform default.
        if (typeof parsed.options.shell === 'string') {
            parsed.command = parsed.options.shell;
        } else if (process.platform === 'android') {
            parsed.command = '/system/bin/sh';
        } else {
            parsed.command = '/bin/sh';
        }

        parsed.args = ['-c', shellCommand];
    }

    return parsed;
}
| |
// Normalizes (command, args, options) into a `parsed` descriptor and
// delegates to parseShell/parseNonShell. Mirrors Node's child_process
// argument handling: (command, options) is accepted with args omitted.
function parse(command, args, options) {
    // Shift arguments when args was omitted and options passed in its place
    if (args && !Array.isArray(args)) {
        options = args;
        args = null;
    }

    // Defensive copies so the caller's array/object are never mutated
    args = args ? args.slice(0) : [];
    options = Object.assign({}, options);

    const parsed = {
        command,
        args,
        options,
        file: undefined,
        original: {
            command,
            args,
        },
    };

    // Shell mode and non-shell mode take entirely different paths
    return options.shell ? parseShell(parsed) : parseNonShell(parsed);
}
| |
| module.exports = parse; |
| |
| |
| /***/ }), |
| |
| /***/ 574: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
| module.exports = new Type('tag:yaml.org,2002:str', { |
| kind: 'scalar', |
| construct: function (data) { return data !== null ? data : ''; } |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 577: |
| /***/ (function(module) { |
| |
| module.exports = getPageLinks |
| |
// Parses a GitHub `Link` header into { next, prev, first, last, ... } URLs.
// Accepts either a response-like object ({ headers: { link } }) or an
// object carrying the header directly on `.link`.
function getPageLinks (link) {
  link = link.link || link.headers.link || ''

  const links = {}

  // link format:
  // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
  const linkRe = /<([^>]*)>;\s*rel="([\w]*)"/g
  let match
  while ((match = linkRe.exec(link)) !== null) {
    links[match[2]] = match[1]
  }

  return links
}
| |
| |
| /***/ }), |
| |
| /***/ 581: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| // Standard YAML's Failsafe schema. |
| // http://www.yaml.org/spec/1.2/spec.html#id2802346 |
| |
| |
| |
| |
| |
| var Schema = __webpack_require__(43); |
| |
| |
| module.exports = new Schema({ |
| explicit: [ |
| __webpack_require__(574), |
| __webpack_require__(921), |
| __webpack_require__(988) |
| ] |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 605: |
| /***/ (function(module) { |
| |
| module.exports = require("http"); |
| |
| /***/ }), |
| |
| /***/ 611: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| // Standard YAML's Core schema. |
| // http://www.yaml.org/spec/1.2/spec.html#id2804923 |
| // |
| // NOTE: JS-YAML does not support schema-specific tag resolution restrictions. |
| // So, Core schema has no distinctions from JSON schema is JS-YAML. |
| |
| |
| |
| |
| |
| var Schema = __webpack_require__(43); |
| |
| |
| module.exports = new Schema({ |
| include: [ |
| __webpack_require__(23) |
| ] |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 614: |
| /***/ (function(module) { |
| |
| module.exports = require("events"); |
| |
| /***/ }), |
| |
| /***/ 619: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Unwraps a transpiled ES-module namespace object: returns `.default`
// when present on an object, otherwise returns the value unchanged.
function _interopDefault (ex) {
    if (ex && (typeof ex === 'object') && 'default' in ex) {
        return ex['default'];
    }
    return ex;
}
| |
| var osName = _interopDefault(__webpack_require__(2)); |
| |
// Builds a User-Agent string like "Node.js/12.16.1 (<os name>; x64)".
// os-name can throw on Windows when its `wmic os get Caption` probe fails
// (restricted environments); that specific failure gets a generic label,
// anything else is rethrown.
function getUserAgent() {
  try {
    return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
  } catch (error) {
    if (/wmic os get Caption/.test(error.message)) {
      return "Windows <version undetectable>";
    }

    throw error;
  }
}
| |
| exports.getUserAgent = getUserAgent; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 621: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const path = __webpack_require__(622); |
| const pathKey = __webpack_require__(39); |
| |
// Returns a PATH-style string that prepends every `node_modules/.bin`
// directory from opts.cwd up to the filesystem root, then the directory
// of the running node binary, ahead of the original opts.path.
module.exports = opts => {
	opts = Object.assign({
		cwd: process.cwd(),
		path: process.env[pathKey()]
	}, opts);

	let prev;
	let pth = path.resolve(opts.cwd);
	const ret = [];

	// Walk upward until resolve('..') stops changing the path (reached root).
	while (prev !== pth) {
		ret.push(path.join(pth, 'node_modules/.bin'));
		prev = pth;
		pth = path.resolve(pth, '..');
	}

	// ensure the running `node` binary is used
	ret.push(path.dirname(process.execPath));

	return ret.concat(opts.path).join(path.delimiter);
};
| |
// Returns a copy of opts.env (default process.env) whose PATH variable
// has been augmented via module.exports above; the original env object
// is left untouched.
module.exports.env = opts => {
	opts = Object.assign({
		env: process.env
	}, opts);

	const env = Object.assign({}, opts.env);
	// pathKey resolves the platform-correct variable name (PATH vs Path).
	const path = pathKey({env});

	opts.path = env[path];
	env[path] = module.exports(opts);

	return env;
};
| |
| |
| /***/ }), |
| |
| /***/ 622: |
| /***/ (function(module) { |
| |
| module.exports = require("path"); |
| |
| /***/ }), |
| |
| /***/ 626: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| /*! |
| * isobject <https://github.com/jonschlinkert/isobject> |
| * |
| * Copyright (c) 2014-2017, Jon Schlinkert. |
| * Released under the MIT License. |
| */ |
| |
| function isObject(val) { |
| return val != null && typeof val === 'object' && Array.isArray(val) === false; |
| } |
| |
| /*! |
| * is-plain-object <https://github.com/jonschlinkert/is-plain-object> |
| * |
| * Copyright (c) 2014-2017, Jon Schlinkert. |
| * Released under the MIT License. |
| */ |
| |
| function isObjectObject(o) { |
| return isObject(o) === true |
| && Object.prototype.toString.call(o) === '[object Object]'; |
| } |
| |
| function isPlainObject(o) { |
| var ctor,prot; |
| |
| if (isObjectObject(o) === false) return false; |
| |
| // If has modified constructor |
| ctor = o.constructor; |
| if (typeof ctor !== 'function') return false; |
| |
| // If has modified prototype |
| prot = ctor.prototype; |
| if (isObjectObject(prot) === false) return false; |
| |
| // If constructor does not have an Object-specific method |
| if (prot.hasOwnProperty('isPrototypeOf') === false) { |
| return false; |
| } |
| |
| // Most likely a plain Object |
| return true; |
| } |
| |
| module.exports = isPlainObject; |
| |
| |
| /***/ }), |
| |
| /***/ 629: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// Validates a YAML scalar as a serializable JS regexp.
// Bare patterns (`foo`) always resolve; `/pattern/flags` form must carry
// at most three of [gim] and be properly closed with a trailing '/'.
function resolveJavascriptRegExp(data) {
  if (data === null || data.length === 0) return false;

  if (data[0] === '/') {
    var flagMatch = /\/([gim]*)$/.exec(data);
    var flags = flagMatch ? flagMatch[1] : '';

    // `/foo/gim` - modifiers tail can be maximum 3 chars
    if (flags.length > 3) return false;
    // if expression starts with /, it should be properly terminated
    if (data[data.length - flags.length - 1] !== '/') return false;
  }

  return true;
}
| |
// Builds a RegExp from a YAML scalar: either a bare pattern, or the
// `/pattern/flags` form whose delimiters and flags are stripped off.
function constructJavascriptRegExp(data) {
  var source = data;
  var flags = '';

  if (source[0] === '/') {
    var tail = /\/([gim]*)$/.exec(data);
    if (tail) flags = tail[1];
    // Drop the leading '/' and the trailing '/<flags>'.
    source = source.slice(1, source.length - flags.length - 1);
  }

  return new RegExp(source, flags);
}
| |
// Serializes a RegExp to `/source/flags` form; only the g, m, i flags
// are emitted (in that order), matching what the resolver accepts.
function representJavascriptRegExp(object /*, style*/) {
  var flags = '';

  if (object.global) flags += 'g';
  if (object.multiline) flags += 'm';
  if (object.ignoreCase) flags += 'i';

  return '/' + object.source + '/' + flags;
}
| |
// Brand check for RegExp values; works across realms, unlike instanceof.
function isRegExp(object) {
  var tag = Object.prototype.toString.call(object);
  return tag === '[object RegExp]';
}
| |
| module.exports = new Type('tag:yaml.org,2002:js/regexp', { |
| kind: 'scalar', |
| resolve: resolveJavascriptRegExp, |
| construct: constructJavascriptRegExp, |
| predicate: isRegExp, |
| represent: representJavascriptRegExp |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 631: |
| /***/ (function(module) { |
| |
| module.exports = require("net"); |
| |
| /***/ }), |
| |
| /***/ 633: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// Resolves the YAML merge key: the literal `<<`, or an empty (null) node.
function resolveYamlMerge(data) {
  if (data === null) return true;
  return data === '<<';
}
| |
| module.exports = new Type('tag:yaml.org,2002:merge', { |
| kind: 'scalar', |
| resolve: resolveYamlMerge |
| }); |
| |
| |
| /***/ }), |
| |
| /***/ 649: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = getLastPage |
| |
| const getPage = __webpack_require__(265) |
| |
| function getLastPage (octokit, link, headers) { |
| return getPage(octokit, link, 'last', headers) |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 654: |
| /***/ (function(module) { |
| |
| // This is not the set of all possible signals. |
| // |
| // It IS, however, the set of all signals that trigger |
| // an exit on either Linux or BSD systems. Linux is a |
| // superset of the signal names supported on BSD, and |
| // the unknown signals just fail to register, so we can |
| // catch that easily enough. |
| // |
| // Don't bother with SIGKILL. It's uncatchable, which |
| // means that we can't fire any callbacks anyway. |
| // |
| // If a user does happen to register a handler on a non- |
| // fatal signal like SIGWINCH or something, and then |
| // exit, it'll end up firing `process.emit('exit')`, so |
| // the handler will be fired anyway. |
| // |
| // SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised |
| // artificially, inherently leave the process in a |
| // state from which it is not safe to try and enter JS |
| // listeners. |
// Base set: signals that trigger an exit on every supported platform.
module.exports = [
  'SIGABRT',
  'SIGALRM',
  'SIGHUP',
  'SIGINT',
  'SIGTERM'
]

// POSIX-only signals (not deliverable on Windows).
if (process.platform !== 'win32') {
  module.exports.push(
    'SIGVTALRM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGUSR2',
    'SIGTRAP',
    'SIGSYS',
    'SIGQUIT',
    'SIGIOT'
    // should detect profiler and enable/disable accordingly.
    // see #21
    // 'SIGPROF'
  )
}

// Linux additions beyond the BSD signal set.
if (process.platform === 'linux') {
  module.exports.push(
    'SIGIO',
    'SIGPOLL',
    'SIGPWR',
    'SIGSTKFLT',
    'SIGUNUSED'
  )
}
| |
| |
| /***/ }), |
| |
| /***/ 669: |
| /***/ (function(module) { |
| |
| module.exports = require("util"); |
| |
| /***/ }), |
| |
| /***/ 672: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript-emitted __awaiter helper: drives a generator with promise
// resolution to emulate async/await in the compiled output below.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var _a;
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = __webpack_require__(357);
const fs = __webpack_require__(747);
const path = __webpack_require__(622);
// Re-export the promise-based fs functions this module wraps.
_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
exports.IS_WINDOWS = process.platform === 'win32';
/**
 * Whether a path exists. ENOENT resolves to false; any other stat error
 * (e.g. permission denied) is rethrown to the caller.
 */
function exists(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield exports.stat(fsPath);
        }
        catch (err) {
            if (err.code === 'ENOENT') {
                return false;
            }
            throw err;
        }
        return true;
    });
}
exports.exists = exists;
/**
 * Whether the path is a directory. With useStat=true symlinks are
 * followed (stat); otherwise the link itself is examined (lstat).
 * Rejects if the path does not exist.
 */
function isDirectory(fsPath, useStat = false) {
    return __awaiter(this, void 0, void 0, function* () {
        const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
        return stats.isDirectory();
    });
}
exports.isDirectory = isDirectory;
| /** |
| * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: |
| * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). |
| */ |
| function isRooted(p) { |
| p = normalizeSeparators(p); |
| if (!p) { |
| throw new Error('isRooted() parameter "p" cannot be empty'); |
| } |
| if (exports.IS_WINDOWS) { |
| return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello |
| ); // e.g. C: or C:\hello |
| } |
| return p.startsWith('/'); |
| } |
| exports.isRooted = isRooted; |
| /** |
| * Recursively create a directory at `fsPath`. |
| * |
| * This implementation is optimistic, meaning it attempts to create the full |
| * path first, and backs up the path stack from there. |
| * |
| * @param fsPath The path to create |
| * @param maxDepth The maximum recursion depth |
| * @param depth The current recursion depth |
| */ |
| function mkdirP(fsPath, maxDepth = 1000, depth = 1) { |
| return __awaiter(this, void 0, void 0, function* () { |
| assert_1.ok(fsPath, 'a path argument must be provided'); |
| fsPath = path.resolve(fsPath); |
| if (depth >= maxDepth) |
| return exports.mkdir(fsPath); |
| try { |
| yield exports.mkdir(fsPath); |
| return; |
| } |
| catch (err) { |
| switch (err.code) { |
| case 'ENOENT': { |
| yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); |
| yield exports.mkdir(fsPath); |
| return; |
| } |
| default: { |
| let stats; |
| try { |
| stats = yield exports.stat(fsPath); |
| } |
| catch (err2) { |
| throw err; |
| } |
| if (!stats.isDirectory()) |
| throw err; |
| } |
| } |
| } |
| }); |
| } |
| exports.mkdirP = mkdirP; |
| /** |
| * Best effort attempt to determine whether a file exists and is executable. |
| * @param filePath file path to check |
| * @param extensions additional file extensions to try |
| * @return if file exists and is executable, returns the file path. otherwise empty string. |
| */ |
function tryGetExecutablePath(filePath, extensions) {
    return __awaiter(this, void 0, void 0, function* () {
        let stats = undefined;
        try {
            // test file exists
            stats = yield exports.stat(filePath);
        }
        catch (err) {
            // ENOENT just means "not found"; anything else is unexpected, but
            // this is best-effort, so log and keep going.
            if (err.code !== 'ENOENT') {
                // eslint-disable-next-line no-console
                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
            }
        }
        if (stats && stats.isFile()) {
            if (exports.IS_WINDOWS) {
                // on Windows, test for valid extension
                const upperExt = path.extname(filePath).toUpperCase();
                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
                    return filePath;
                }
            }
            else {
                if (isUnixExecutable(stats)) {
                    return filePath;
                }
            }
        }
        // Exact path missed — try each candidate extension appended in turn.
        const originalFilePath = filePath;
        for (const extension of extensions) {
            filePath = originalFilePath + extension;
            stats = undefined;
            try {
                stats = yield exports.stat(filePath);
            }
            catch (err) {
                if (err.code !== 'ENOENT') {
                    // eslint-disable-next-line no-console
                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
                }
            }
            if (stats && stats.isFile()) {
                if (exports.IS_WINDOWS) {
                    // preserve the case of the actual file (since an extension was appended)
                    try {
                        const directory = path.dirname(filePath);
                        const upperName = path.basename(filePath).toUpperCase();
                        for (const actualName of yield exports.readdir(directory)) {
                            if (upperName === actualName.toUpperCase()) {
                                filePath = path.join(directory, actualName);
                                break;
                            }
                        }
                    }
                    catch (err) {
                        // eslint-disable-next-line no-console
                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
                    }
                    return filePath;
                }
                else {
                    if (isUnixExecutable(stats)) {
                        return filePath;
                    }
                }
            }
        }
        // Nothing matched: signal "not found / not executable" with ''.
        return '';
    });
}
exports.tryGetExecutablePath = tryGetExecutablePath;
// Normalize path separators for the current platform and collapse
// repeated separators into one.
function normalizeSeparators(p) {
    p = p || '';
    if (!exports.IS_WINDOWS) {
        // POSIX: collapse runs of forward slashes
        return p.replace(/\/\/+/g, '/');
    }
    // Windows: unify to backslashes, then collapse runs of them
    return p.replace(/\//g, '\\').replace(/\\\\+/g, '\\');
}
// On Mac/Linux, test whether this process may execute the file:
// world-execute, or group-execute when the gid matches, or owner-execute
// when the uid matches.
//   permission bits:  R   W  X  R  W  X  R W X
//   octal weights:   256 128 64 32 16  8  4 2 1
function isUnixExecutable(stats) {
    const worldExec = (stats.mode & 1) > 0;
    const groupExec = (stats.mode & 8) > 0 && stats.gid === process.getgid();
    const ownerExec = (stats.mode & 64) > 0 && stats.uid === process.getuid();
    return worldExec || groupExec || ownerExec;
}
| //# sourceMappingURL=io-util.js.map |
| |
| /***/ }), |
| |
| /***/ 674: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticate; |
| |
| const { Deprecation } = __webpack_require__(692); |
| const once = __webpack_require__(969); |
| |
| const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); |
| |
/**
 * Deprecated octokit.authenticate() implementation: validates the supplied
 * credentials for the chosen strategy and stores them on `state.auth`.
 * Calling with no options clears any previously configured auth.
 */
function authenticate(state, options) {
  deprecateAuthenticate(
    state.octokit.log,
    new Deprecation(
      '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.'
    )
  );

  if (!options) {
    state.auth = false;
    return;
  }

  const type = options.type;

  if (type === "basic") {
    if (!options.username || !options.password) {
      throw new Error(
        "Basic authentication requires both a username and password to be set"
      );
    }
  } else if (type === "oauth") {
    // Either a token, or an application key & secret pair, is acceptable.
    if (!options.token && !(options.key && options.secret)) {
      throw new Error(
        "OAuth2 authentication requires a token or key & secret to be set"
      );
    }
  } else if (type === "token" || type === "app") {
    if (!options.token) {
      throw new Error("Token authentication requires a token to be set");
    }
  } else {
    throw new Error(
      "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'"
    );
  }

  state.auth = options;
}
| |
| |
| /***/ }), |
| |
| /***/ 675: |
| /***/ (function(module) { |
| |
| module.exports = function btoa(str) { |
| return new Buffer(str).toString('base64') |
| } |
| |
| |
| /***/ }), |
| |
| /***/ 685: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| /*eslint-disable no-use-before-define*/ |
| |
| var common = __webpack_require__(740); |
| var YAMLException = __webpack_require__(556); |
| var DEFAULT_FULL_SCHEMA = __webpack_require__(910); |
| var DEFAULT_SAFE_SCHEMA = __webpack_require__(723); |
| |
| var _toString = Object.prototype.toString; |
| var _hasOwnProperty = Object.prototype.hasOwnProperty; |
| |
| var CHAR_TAB = 0x09; /* Tab */ |
| var CHAR_LINE_FEED = 0x0A; /* LF */ |
| var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ |
| var CHAR_SPACE = 0x20; /* Space */ |
| var CHAR_EXCLAMATION = 0x21; /* ! */ |
| var CHAR_DOUBLE_QUOTE = 0x22; /* " */ |
| var CHAR_SHARP = 0x23; /* # */ |
| var CHAR_PERCENT = 0x25; /* % */ |
| var CHAR_AMPERSAND = 0x26; /* & */ |
| var CHAR_SINGLE_QUOTE = 0x27; /* ' */ |
| var CHAR_ASTERISK = 0x2A; /* * */ |
| var CHAR_COMMA = 0x2C; /* , */ |
| var CHAR_MINUS = 0x2D; /* - */ |
| var CHAR_COLON = 0x3A; /* : */ |
| var CHAR_EQUALS = 0x3D; /* = */ |
| var CHAR_GREATER_THAN = 0x3E; /* > */ |
| var CHAR_QUESTION = 0x3F; /* ? */ |
| var CHAR_COMMERCIAL_AT = 0x40; /* @ */ |
| var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ |
| var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ |
| var CHAR_GRAVE_ACCENT = 0x60; /* ` */ |
| var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ |
| var CHAR_VERTICAL_LINE = 0x7C; /* | */ |
| var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ |
| |
| var ESCAPE_SEQUENCES = {}; |
| |
| ESCAPE_SEQUENCES[0x00] = '\\0'; |
| ESCAPE_SEQUENCES[0x07] = '\\a'; |
| ESCAPE_SEQUENCES[0x08] = '\\b'; |
| ESCAPE_SEQUENCES[0x09] = '\\t'; |
| ESCAPE_SEQUENCES[0x0A] = '\\n'; |
| ESCAPE_SEQUENCES[0x0B] = '\\v'; |
| ESCAPE_SEQUENCES[0x0C] = '\\f'; |
| ESCAPE_SEQUENCES[0x0D] = '\\r'; |
| ESCAPE_SEQUENCES[0x1B] = '\\e'; |
| ESCAPE_SEQUENCES[0x22] = '\\"'; |
| ESCAPE_SEQUENCES[0x5C] = '\\\\'; |
| ESCAPE_SEQUENCES[0x85] = '\\N'; |
| ESCAPE_SEQUENCES[0xA0] = '\\_'; |
| ESCAPE_SEQUENCES[0x2028] = '\\L'; |
| ESCAPE_SEQUENCES[0x2029] = '\\P'; |
| |
| var DEPRECATED_BOOLEANS_SYNTAX = [ |
| 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', |
| 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' |
| ]; |
| |
// Normalize a user-supplied { tag: style } map: expand "!!" tag shorthand to
// the full YAML namespace and resolve per-type style aliases.
function compileStyleMap(schema, map) {
  if (map === null) return {};

  var result = {};

  Object.keys(map).forEach(function (key) {
    var tag = key;
    var style = String(map[key]);

    // Expand the "!!name" shorthand into "tag:yaml.org,2002:name".
    if (tag.slice(0, 2) === '!!') {
      tag = 'tag:yaml.org,2002:' + tag.slice(2);
    }

    var type = schema.compiledTypeMap['fallback'][tag];

    // Let the type translate friendly style names (aliases) to canonical ones.
    if (type && _hasOwnProperty.call(type.styleAliases, style)) {
      style = type.styleAliases[style];
    }

    result[tag] = style;
  });

  return result;
}
| |
// Render a code point as a YAML escape — \xXX, \uXXXX or \UXXXXXXXX —
// using the shortest handle whose field can hold the value.
function encodeHex(character) {
  var hex = character.toString(16).toUpperCase();
  var handle, width;

  if (character <= 0xFF) {
    handle = 'x';
    width = 2;
  } else if (character <= 0xFFFF) {
    handle = 'u';
    width = 4;
  } else if (character <= 0xFFFFFFFF) {
    handle = 'U';
    width = 8;
  } else {
    throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF');
  }

  // Zero-pad the hex digits out to the fixed field width.
  return '\\' + handle + common.repeat('0', width - hex.length) + hex;
}
| |
// Dumper state: normalizes the options passed to dump()/safeDump() and holds
// the per-run accumulators (current tag, output, duplicate-reference tracking).
function State(options) {
  this.schema = options['schema'] || DEFAULT_FULL_SCHEMA;
  // Indentation is clamped to at least one space.
  this.indent = Math.max(1, (options['indent'] || 2));
  this.noArrayIndent = options['noArrayIndent'] || false;
  this.skipInvalid = options['skipInvalid'] || false;
  // flowLevel === -1 means "never switch to flow style".
  this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']);
  this.styleMap = compileStyleMap(this.schema, options['styles'] || null);
  this.sortKeys = options['sortKeys'] || false;
  this.lineWidth = options['lineWidth'] || 80;
  this.noRefs = options['noRefs'] || false;
  this.noCompatMode = options['noCompatMode'] || false;
  this.condenseFlow = options['condenseFlow'] || false;

  this.implicitTypes = this.schema.compiledImplicit;
  this.explicitTypes = this.schema.compiledExplicit;

  // Tag of the node currently being written, and the accumulated output.
  this.tag = null;
  this.result = '';

  // Objects referenced more than once (anchor candidates) and a parallel
  // "already emitted as &ref_N" flag array (filled in by getDuplicateReferences).
  this.duplicates = [];
  this.usedDuplicates = null;
}
| |
// Indents every line in a string. Empty lines (\n only) are not indented.
function indentString(string, spaces) {
  var pad = common.repeat(' ', spaces);
  var result = '';
  var position = 0;

  while (position < string.length) {
    // Take the next line including its trailing \n (or the remainder).
    var lf = string.indexOf('\n', position);
    var line;
    if (lf === -1) {
      line = string.slice(position);
      position = string.length;
    } else {
      line = string.slice(position, lf + 1);
      position = lf + 1;
    }

    // Prefix indentation unless the line is empty (a lone line feed).
    if (line.length && line !== '\n') result += pad;
    result += line;
  }

  return result;
}
| |
// Emit a newline followed by the indentation for the given nesting level.
function generateNextLine(state, level) {
  return '\n' + common.repeat(' ', level * state.indent);
}
| |
// True when `str` would be implicitly resolved to a non-string type by any of
// the schema's implicit types (so emitting it as a plain scalar is ambiguous).
function testImplicitResolving(state, str) {
  return state.implicitTypes.some(function (type) {
    return type.resolve(str);
  });
}
| |
// [33] s-white ::= s-space | s-tab
// True for YAML's two "simple whitespace" code points (space, tab).
function isWhitespace(c) {
  return c === CHAR_SPACE || c === CHAR_TAB;
}
| |
// Returns true if the character can be printed without escaping.
// From YAML 1.2: "any allowed characters known to be non-printable
// should also be escaped. [However,] This isn’t mandatory"
// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029.
function isPrintable(c) {
  // ASCII printable range.
  if (0x00020 <= c && c <= 0x00007E) return true;
  // BMP above the Latin-1 controls, minus the line/paragraph separators.
  if (0x000A1 <= c && c <= 0x00D7FF) return c !== 0x2028 && c !== 0x2029;
  // Private-use / compatibility area, minus the BOM.
  if (0x0E000 <= c && c <= 0x00FFFD) return c !== 0xFEFF;
  // Supplementary planes.
  return 0x10000 <= c && c <= 0x10FFFF;
}
| |
// [34] ns-char ::= nb-char - s-white
// [27] nb-char ::= c-printable - b-char - c-byte-order-mark
// [26] b-char  ::= b-line-feed | b-carriage-return
// [24] b-line-feed ::= #xA /* LF */
// [25] b-carriage-return ::= #xD /* CR */
// [3]  c-byte-order-mark ::= #xFEFF
function isNsChar(c) {
  if (!isPrintable(c) || isWhitespace(c)) return false;
  // Exclude the byte-order-mark and the b-char line breaks.
  return c !== 0xFEFF
      && c !== CHAR_CARRIAGE_RETURN
      && c !== CHAR_LINE_FEED;
}
| |
// Simplified test for values allowed after the first character in plain style.
// Uses a subset of nb-char - c-flow-indicator - ":" - "#"
// where nb-char ::= c-printable - b-char - c-byte-order-mark.
function isPlainSafe(c, prev) {
  if (!isPrintable(c) || c === 0xFEFF) return false;
  switch (c) {
    // c-flow-indicator characters and ":" are never plain-safe here.
    case CHAR_COMMA:
    case CHAR_LEFT_SQUARE_BRACKET:
    case CHAR_RIGHT_SQUARE_BRACKET:
    case CHAR_LEFT_CURLY_BRACKET:
    case CHAR_RIGHT_CURLY_BRACKET:
    case CHAR_COLON:
      return false;
    case CHAR_SHARP:
      // "#" is only safe when immediately preceded by an ns-char.
      return (prev && isNsChar(prev));
    default:
      return true;
  }
}
| |
// Simplified test for values allowed as the first character in plain style.
// Uses a subset of ns-char - c-indicator (where ns-char = nb-char - s-white):
// the full c-indicator set plus "%", "@" and "`" can never open a plain scalar.
function isPlainSafeFirst(c) {
  var forbidden = [
    // c-indicator ::= “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}”
    CHAR_MINUS, CHAR_QUESTION, CHAR_COLON, CHAR_COMMA,
    CHAR_LEFT_SQUARE_BRACKET, CHAR_RIGHT_SQUARE_BRACKET,
    CHAR_LEFT_CURLY_BRACKET, CHAR_RIGHT_CURLY_BRACKET,
    // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"”
    CHAR_SHARP, CHAR_AMPERSAND, CHAR_ASTERISK, CHAR_EXCLAMATION,
    CHAR_VERTICAL_LINE, CHAR_EQUALS, CHAR_GREATER_THAN,
    CHAR_SINGLE_QUOTE, CHAR_DOUBLE_QUOTE,
    // | “%” | “@” | “`”
    CHAR_PERCENT, CHAR_COMMERCIAL_AT, CHAR_GRAVE_ACCENT
  ];
  return isPrintable(c)
      && c !== 0xFEFF
      && !isWhitespace(c) // - s-white
      && forbidden.indexOf(c) === -1;
}
| |
// Determines whether a block indentation indicator is required: the content
// (after any leading blank lines) starts with a space, so the parser could
// not otherwise detect the indentation level.
function needIndentIndicator(string) {
  return /^\n* /.test(string);
}
| |
| var STYLE_PLAIN = 1, |
| STYLE_SINGLE = 2, |
| STYLE_LITERAL = 3, |
| STYLE_FOLDED = 4, |
| STYLE_DOUBLE = 5; |
| |
| // Determines which scalar styles are possible and returns the preferred style. |
| // lineWidth = -1 => no limit. |
| // Pre-conditions: str.length > 0. |
| // Post-conditions: |
| // STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. |
| // STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). |
| // STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). |
// Single pass over the string: tracks whether plain style stays legal,
// whether any line break exists, and whether any line is long enough
// (and not more-indented) to benefit from folding.
function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType) {
  var i;
  var char, prev_char;
  var hasLineBreak = false;
  var hasFoldableLine = false; // only checked if shouldTrackWidth
  var shouldTrackWidth = lineWidth !== -1;
  var previousLineBreak = -1; // count the first line correctly
  // Plain style is ruled out up-front by an unsafe first char or trailing whitespace.
  var plain = isPlainSafeFirst(string.charCodeAt(0))
          && !isWhitespace(string.charCodeAt(string.length - 1));

  if (singleLineOnly) {
    // Case: no block styles.
    // Check for disallowed characters to rule out plain and single.
    for (i = 0; i < string.length; i++) {
      char = string.charCodeAt(i);
      if (!isPrintable(char)) {
        return STYLE_DOUBLE;
      }
      prev_char = i > 0 ? string.charCodeAt(i - 1) : null;
      plain = plain && isPlainSafe(char, prev_char);
    }
  } else {
    // Case: block styles permitted.
    for (i = 0; i < string.length; i++) {
      char = string.charCodeAt(i);
      if (char === CHAR_LINE_FEED) {
        hasLineBreak = true;
        // Check if any line can be folded.
        if (shouldTrackWidth) {
          hasFoldableLine = hasFoldableLine ||
            // Foldable line = too long, and not more-indented.
            (i - previousLineBreak - 1 > lineWidth &&
             string[previousLineBreak + 1] !== ' ');
          previousLineBreak = i;
        }
      } else if (!isPrintable(char)) {
        return STYLE_DOUBLE;
      }
      prev_char = i > 0 ? string.charCodeAt(i - 1) : null;
      plain = plain && isPlainSafe(char, prev_char);
    }
    // in case the end is missing a \n
    hasFoldableLine = hasFoldableLine || (shouldTrackWidth &&
      (i - previousLineBreak - 1 > lineWidth &&
       string[previousLineBreak + 1] !== ' '));
  }
  // Although every style can represent \n without escaping, prefer block styles
  // for multiline, since they're more readable and they don't add empty lines.
  // Also prefer folding a super-long line.
  if (!hasLineBreak && !hasFoldableLine) {
    // Strings interpretable as another type have to be quoted;
    // e.g. the string 'true' vs. the boolean true.
    return plain && !testAmbiguousType(string)
      ? STYLE_PLAIN : STYLE_SINGLE;
  }
  // Edge case: block indentation indicator can only have one digit.
  if (indentPerLevel > 9 && needIndentIndicator(string)) {
    return STYLE_DOUBLE;
  }
  // At this point we know block styles are valid.
  // Prefer literal style unless we want to fold.
  return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL;
}
| |
| // Note: line breaking/folding is implemented for only the folded style. |
| // NB. We drop the last trailing newline (if any) of a returned block scalar |
| // since the dumper adds its own newline. This always works: |
| // • No ending newline => unaffected; already using strip "-" chomping. |
| // • Ending newline => removed then restored. |
| // Importantly, this keeps the "+" chomp indicator from gaining an extra line. |
// Render a string scalar into state.dump in the preferred YAML style
// (plain / single / literal / folded / double), given the nesting level
// and whether the scalar is a mapping key.
function writeScalar(state, string, level, iskey) {
  state.dump = (function () {
    if (string.length === 0) {
      return "''";
    }
    // YAML 1.1 booleans like "yes"/"on" must be quoted so YAML 1.1 parsers
    // don't read them as booleans (unless the caller opted out).
    if (!state.noCompatMode &&
        DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1) {
      return "'" + string + "'";
    }

    var indent = state.indent * Math.max(1, level); // no 0-indent scalars
    // As indentation gets deeper, let the width decrease monotonically
    // to the lower bound min(state.lineWidth, 40).
    // Note that this implies
    //  state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound.
    //  state.lineWidth > 40 + state.indent: width decreases until the lower bound.
    // This behaves better than a constant minimum width which disallows narrower options,
    // or an indent threshold which causes the width to suddenly increase.
    var lineWidth = state.lineWidth === -1
      ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent);

    // Without knowing if keys are implicit/explicit, assume implicit for safety.
    var singleLineOnly = iskey
      // No block styles in flow mode.
      || (state.flowLevel > -1 && level >= state.flowLevel);
    function testAmbiguity(string) {
      return testImplicitResolving(state, string);
    }

    switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, testAmbiguity)) {
      case STYLE_PLAIN:
        return string;
      case STYLE_SINGLE:
        return "'" + string.replace(/'/g, "''") + "'";
      case STYLE_LITERAL:
        return '|' + blockHeader(string, state.indent)
          + dropEndingNewline(indentString(string, indent));
      case STYLE_FOLDED:
        return '>' + blockHeader(string, state.indent)
          + dropEndingNewline(indentString(foldString(string, lineWidth), indent));
      case STYLE_DOUBLE:
        return '"' + escapeString(string, lineWidth) + '"';
      default:
        throw new YAMLException('impossible error: invalid scalar style');
    }
  }());
}
| |
// Build the block-scalar header: optional indentation indicator digit plus
// chomping indicator, followed by the mandatory newline.
// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9.
function blockHeader(string, indentPerLevel) {
  var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : '';

  // Note the special case: the string '\n' counts as a "trailing" empty line.
  var clip = string[string.length - 1] === '\n';
  var keep = clip && (string === '\n' || string[string.length - 2] === '\n');

  var chomp;
  if (keep) {
    chomp = '+';  // keep: preserve trailing newlines
  } else if (clip) {
    chomp = '';   // clip: a single trailing newline (default)
  } else {
    chomp = '-';  // strip: no trailing newline
  }

  return indentIndicator + chomp + '\n';
}
| |
// Remove a single trailing newline, if present. (See the note for writeScalar:
// the dumper adds its own newline, so the block scalar must not carry one.)
function dropEndingNewline(string) {
  if (string.charAt(string.length - 1) === '\n') {
    return string.slice(0, -1);
  }
  return string;
}
| |
// Note: a long line without a suitable break point will exceed the width limit.
// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0.
function foldString(string, width) {
  // In folded style, $k$ consecutive newlines output as $k+1$ newlines—
  // unless they're before or after a more-indented line, or at the very
  // beginning or end, in which case $k$ maps to $k$.
  // Therefore, parse each chunk as newline(s) followed by a content line.
  var lineRe = /(\n+)([^\n]*)/g;

  // first line (possibly an empty line)
  var result = (function () {
    var nextLF = string.indexOf('\n');
    nextLF = nextLF !== -1 ? nextLF : string.length;
    // Prime the stateful /g regex so exec() resumes after the first line.
    lineRe.lastIndex = nextLF;
    return foldLine(string.slice(0, nextLF), width);
  }());
  // If we haven't reached the first content line yet, don't add an extra \n.
  var prevMoreIndented = string[0] === '\n' || string[0] === ' ';
  var moreIndented;

  // rest of the lines
  var match;
  while ((match = lineRe.exec(string))) {
    var prefix = match[1], line = match[2];
    moreIndented = (line[0] === ' ');
    // Insert the extra \n (k -> k+1) only between two non-more-indented,
    // non-empty content lines.
    result += prefix
      + (!prevMoreIndented && !moreIndented && line !== ''
        ? '\n' : '')
      + foldLine(line, width);
    prevMoreIndented = moreIndented;
  }

  return result;
}
| |
// Greedy line breaking.
// Picks the longest line under the limit each time,
// otherwise settles for the shortest line over the limit.
// NB. More-indented lines *cannot* be folded, as that would add an extra \n.
function foldLine(line, width) {
  // Empty and more-indented lines are returned untouched.
  if (line === '' || line[0] === ' ') return line;

  // Since a more-indented line adds a \n, breaks can't be followed by a space.
  var breakRe = / [^ ]/g; // note: the match index will always be <= length-2.
  var match;
  // start is an inclusive index. end, curr, and next are exclusive.
  var start = 0, end, curr = 0, next = 0;
  var result = '';

  // Invariants: 0 <= start <= length-1.
  //   0 <= curr <= next <= max(0, length-2). curr - start <= width.
  // Inside the loop:
  //   A match implies length >= 2, so curr and next are <= length-2.
  while ((match = breakRe.exec(line))) {
    next = match.index;
    // maintain invariant: curr - start <= width
    if (next - start > width) {
      end = (curr > start) ? curr : next; // derive end <= length-2
      result += '\n' + line.slice(start, end);
      // skip the space that was output as \n
      start = end + 1;                    // derive start <= length-1
    }
    curr = next;
  }

  // By the invariants, start <= length-1, so there is something left over.
  // It is either the whole string or a part starting from non-whitespace.
  result += '\n';
  // Insert a break if the remainder is too long and there is a break available.
  if (line.length - start > width && curr > start) {
    result += line.slice(start, curr) + '\n' + line.slice(curr + 1);
  } else {
    result += line.slice(start);
  }

  return result.slice(1); // drop extra \n joiner
}
| |
// Escapes a double-quoted string.
// NOTE(review): the call site in writeScalar passes (string, lineWidth), but
// this signature only declares `string` — the width argument is currently
// unused here.
function escapeString(string) {
  var result = '';
  var char, nextChar;
  var escapeSeq;

  for (var i = 0; i < string.length; i++) {
    char = string.charCodeAt(i);
    // Check for surrogate pairs (reference Unicode 3.0 section "3.7 Surrogates").
    if (char >= 0xD800 && char <= 0xDBFF/* high surrogate */) {
      nextChar = string.charCodeAt(i + 1);
      if (nextChar >= 0xDC00 && nextChar <= 0xDFFF/* low surrogate */) {
        // Combine the surrogate pair and store it escaped.
        result += encodeHex((char - 0xD800) * 0x400 + nextChar - 0xDC00 + 0x10000);
        // Advance index one extra since we already used that char here.
        i++; continue;
      }
    }
    // Prefer a named escape sequence; printable chars pass through as-is;
    // everything else becomes a hex escape.
    escapeSeq = ESCAPE_SEQUENCES[char];
    result += !escapeSeq && isPrintable(char)
      ? string[i]
      : escapeSeq || encodeHex(char);
  }

  return result;
}
| |
// Render an array in flow style ("[a, b, c]") into state.dump.
function writeFlowSequence(state, level, object) {
  var parentTag = state.tag;
  var result = '';

  for (var index = 0; index < object.length; index += 1) {
    // Write only valid elements; invalid elements are skipped. (As upstream,
    // the separator is keyed off the element index, not off written output.)
    if (!writeNode(state, level, object[index], false, false)) continue;
    if (index !== 0) result += state.condenseFlow ? ',' : ', ';
    result += state.dump;
  }

  state.tag = parentTag;
  state.dump = '[' + result + ']';
}
| |
// Render an array in block style ("- item" per line) into state.dump.
function writeBlockSequence(state, level, object, compact) {
  var parentTag = state.tag;
  var result = '';

  for (var index = 0; index < object.length; index += 1) {
    // Write only valid elements.
    if (!writeNode(state, level + 1, object[index], true, true)) continue;

    if (!compact || index !== 0) {
      result += generateNextLine(state, level);
    }

    // If the rendered child begins with a newline (a nested block),
    // omit the space after the dash.
    if (state.dump && state.dump.charCodeAt(0) === CHAR_LINE_FEED) {
      result += '-';
    } else {
      result += '- ';
    }

    result += state.dump;
  }

  state.tag = parentTag;
  state.dump = result || '[]'; // Empty sequence if no valid values.
}
| |
// Render an object in flow style ("{a: 1, b: 2}") into state.dump.
function writeFlowMapping(state, level, object) {
  var _result = '',
      _tag = state.tag,
      objectKeyList = Object.keys(object),
      index,
      length,
      objectKey,
      objectValue,
      pairBuffer;

  for (index = 0, length = objectKeyList.length; index < length; index += 1) {

    pairBuffer = '';
    if (index !== 0) pairBuffer += ', ';

    // condenseFlow wraps keys in double quotes (JSON-like compact output).
    if (state.condenseFlow) pairBuffer += '"';

    objectKey = objectKeyList[index];
    objectValue = object[objectKey];

    if (!writeNode(state, level, objectKey, false, false)) {
      continue; // Skip this pair because of invalid key;
    }

    // Very long keys get the explicit-key "?" marker.
    if (state.dump.length > 1024) pairBuffer += '? ';

    pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' ');

    if (!writeNode(state, level, objectValue, false, false)) {
      continue; // Skip this pair because of invalid value.
    }

    pairBuffer += state.dump;

    // Both key and value are valid.
    _result += pairBuffer;
  }

  state.tag = _tag;
  state.dump = '{' + _result + '}';
}
| |
// Render an object in block style ("key: value" per line) into state.dump.
function writeBlockMapping(state, level, object, compact) {
  var _result = '',
      _tag = state.tag,
      objectKeyList = Object.keys(object),
      index,
      length,
      objectKey,
      objectValue,
      explicitPair,
      pairBuffer;

  // Allow sorting keys so that the output file is deterministic
  if (state.sortKeys === true) {
    // Default sorting
    objectKeyList.sort();
  } else if (typeof state.sortKeys === 'function') {
    // Custom sort function
    objectKeyList.sort(state.sortKeys);
  } else if (state.sortKeys) {
    // Something is wrong
    throw new YAMLException('sortKeys must be a boolean or a function');
  }

  for (index = 0, length = objectKeyList.length; index < length; index += 1) {
    pairBuffer = '';

    if (!compact || index !== 0) {
      pairBuffer += generateNextLine(state, level);
    }

    objectKey = objectKeyList[index];
    objectValue = object[objectKey];

    if (!writeNode(state, level + 1, objectKey, true, true, true)) {
      continue; // Skip this pair because of invalid key.
    }

    // Keys with an explicit tag, or very long keys, need the "? " marker.
    explicitPair = (state.tag !== null && state.tag !== '?') ||
                   (state.dump && state.dump.length > 1024);

    if (explicitPair) {
      // Omit the space when the rendered key itself starts with a newline.
      if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
        pairBuffer += '?';
      } else {
        pairBuffer += '? ';
      }
    }

    pairBuffer += state.dump;

    if (explicitPair) {
      pairBuffer += generateNextLine(state, level);
    }

    if (!writeNode(state, level + 1, objectValue, true, explicitPair)) {
      continue; // Skip this pair because of invalid value.
    }

    // Same newline-sensitivity for the ":" separator.
    if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
      pairBuffer += ':';
    } else {
      pairBuffer += ': ';
    }

    pairBuffer += state.dump;

    // Both key and value are valid.
    _result += pairBuffer;
  }

  state.tag = _tag;
  state.dump = _result || '{}'; // Empty mapping if no valid pairs.
}
| |
// Find the first schema type (explicit or implicit list) that matches
// `object`, set state.tag accordingly, and — if the type has a representer —
// render the object into state.dump. Returns true when a type matched.
function detectType(state, object, explicit) {
  var _result, typeList, index, length, type, style;

  typeList = explicit ? state.explicitTypes : state.implicitTypes;

  for (index = 0, length = typeList.length; index < length; index += 1) {
    type = typeList[index];

    // A type matches when its instanceOf and/or predicate checks pass
    // (at least one of the two must be defined).
    if ((type.instanceOf || type.predicate) &&
        (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) &&
        (!type.predicate || type.predicate(object))) {

      // Implicit matches keep the non-specific "?" tag; explicit ones carry their tag.
      state.tag = explicit ? type.tag : '?';

      if (type.represent) {
        // The style map (from dump options) can override the type's default style.
        style = state.styleMap[type.tag] || type.defaultStyle;

        if (_toString.call(type.represent) === '[object Function]') {
          _result = type.represent(object, style);
        } else if (_hasOwnProperty.call(type.represent, style)) {
          _result = type.represent[style](object, style);
        } else {
          throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style');
        }

        state.dump = _result;
      }

      return true;
    }
  }

  return false;
}
| |
// Serializes `object` and writes it to global `result`.
// Returns true on success, or false on invalid object.
//
function writeNode(state, level, object, block, compact, iskey) {
  state.tag = null;
  state.dump = object;

  // Resolve a type for the node: implicit types first, explicit as fallback.
  if (!detectType(state, object, false)) {
    detectType(state, object, true);
  }

  var type = _toString.call(state.dump);

  // Block style is only allowed below the configured flow level.
  if (block) {
    block = (state.flowLevel < 0 || state.flowLevel > level);
  }

  var objectOrArray = type === '[object Object]' || type === '[object Array]',
      duplicateIndex,
      duplicate;

  // Shared references (collected by getDuplicateReferences) become anchors/aliases.
  if (objectOrArray) {
    duplicateIndex = state.duplicates.indexOf(object);
    duplicate = duplicateIndex !== -1;
  }

  // Tagged nodes, anchored nodes, and non-default indents disable compact output.
  if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) {
    compact = false;
  }

  if (duplicate && state.usedDuplicates[duplicateIndex]) {
    // Already emitted once with an anchor — emit an alias.
    state.dump = '*ref_' + duplicateIndex;
  } else {
    if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) {
      state.usedDuplicates[duplicateIndex] = true;
    }
    if (type === '[object Object]') {
      if (block && (Object.keys(state.dump).length !== 0)) {
        writeBlockMapping(state, level, state.dump, compact);
        if (duplicate) {
          state.dump = '&ref_' + duplicateIndex + state.dump;
        }
      } else {
        writeFlowMapping(state, level, state.dump);
        if (duplicate) {
          state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;
        }
      }
    } else if (type === '[object Array]') {
      // noArrayIndent keeps sequence dashes at the parent's indentation.
      var arrayLevel = (state.noArrayIndent && (level > 0)) ? level - 1 : level;
      if (block && (state.dump.length !== 0)) {
        writeBlockSequence(state, arrayLevel, state.dump, compact);
        if (duplicate) {
          state.dump = '&ref_' + duplicateIndex + state.dump;
        }
      } else {
        writeFlowSequence(state, arrayLevel, state.dump);
        if (duplicate) {
          state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;
        }
      }
    } else if (type === '[object String]') {
      if (state.tag !== '?') {
        writeScalar(state, state.dump, level, iskey);
      }
    } else {
      // Unrepresentable value: silently drop it when skipInvalid is on.
      if (state.skipInvalid) return false;
      throw new YAMLException('unacceptable kind of an object to dump ' + type);
    }

    // Prefix the verbatim tag for explicitly tagged nodes.
    if (state.tag !== null && state.tag !== '?') {
      state.dump = '!<' + state.tag + '> ' + state.dump;
    }
  }

  return true;
}
| |
// Pre-scan the input graph: record every object that is referenced more than
// once on state.duplicates (anchor candidates) and size the parallel
// usedDuplicates flag array.
function getDuplicateReferences(object, state) {
  var objects = [];
  var duplicatesIndexes = [];

  inspectNode(object, objects, duplicatesIndexes);

  for (var i = 0; i < duplicatesIndexes.length; i += 1) {
    state.duplicates.push(objects[duplicatesIndexes[i]]);
  }
  state.usedDuplicates = new Array(duplicatesIndexes.length);
}
| |
// Depth-first walk over `object`: collect every reachable object in `objects`
// (by identity) and record the index of each one seen more than once in
// `duplicatesIndexes`.
function inspectNode(object, objects, duplicatesIndexes) {
  if (object === null || typeof object !== 'object') return;

  var seenAt = objects.indexOf(object);
  if (seenAt !== -1) {
    // Second (or later) sighting: record the duplicate once and stop —
    // its children were already visited on the first pass.
    if (duplicatesIndexes.indexOf(seenAt) === -1) {
      duplicatesIndexes.push(seenAt);
    }
    return;
  }

  objects.push(object);

  if (Array.isArray(object)) {
    for (var i = 0; i < object.length; i += 1) {
      inspectNode(object[i], objects, duplicatesIndexes);
    }
  } else {
    var keys = Object.keys(object);
    for (var j = 0; j < keys.length; j += 1) {
      inspectNode(object[keys[j]], objects, duplicatesIndexes);
    }
  }
}
| |
// Serialize `input` to a YAML document string (with trailing newline), or
// return '' when the root itself is unrepresentable and skipInvalid is set.
function dump(input, options) {
  var state = new State(options || {});

  // Pre-scan for shared references so anchors/aliases can be emitted,
  // unless the caller opted out via noRefs.
  if (!state.noRefs) getDuplicateReferences(input, state);

  if (writeNode(state, 0, input, true, true)) {
    return state.dump + '\n';
  }
  return '';
}
| |
// Like `dump`, but seeds the safe schema as the default; any `schema`
// supplied by the caller in `options` still wins (extend copies caller
// keys over the default).
function safeDump(input, options) {
  return dump(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options));
}
| |
| module.exports.dump = dump; |
| module.exports.safeDump = safeDump; |
| |
| |
| /***/ }), |
| |
| /***/ 686: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript's emitted __awaiter helper: wraps a generator-based async body
// in a Promise, stepping the generator and adopting each yielded value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Promote a plain value to an instance of the promise constructor P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either finish with the generator's return value or await the yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript's emitted __importStar helper: for a CommonJS module, copies its
// own enumerable properties onto a fresh namespace object and exposes the
// module itself as the `default` export; ES modules pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled requires. Judging by the binding names these resolve to Node core
// modules (os, events, child_process, path) plus two @actions/io helper
// modules — TODO(review): confirm the webpack module ids against the bundle.
const os = __importStar(__webpack_require__(87));
const events = __importStar(__webpack_require__(614));
const child = __importStar(__webpack_require__(129));
const path = __importStar(__webpack_require__(622));
const io = __importStar(__webpack_require__(1));
const ioUtil = __importStar(__webpack_require__(672));
/* eslint-disable @typescript-eslint/unbound-method */
// True on Windows; selects the cmd/verbatim quoting code paths below.
const IS_WINDOWS = process.platform === 'win32';
| /* |
| * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. |
| */ |
class ToolRunner extends events.EventEmitter {
    /**
     * @param {string} toolPath  path to the tool to run (required)
     * @param {string[]} [args]  arguments to pass to the tool
     * @param {object} [options] exec options (listeners, silent, cwd, ...)
     * @throws {Error} when toolPath is empty
     */
    constructor(toolPath, args, options) {
        super();
        if (!toolPath) {
            throw new Error("Parameter 'toolPath' cannot be null or empty.");
        }
        this.toolPath = toolPath;
        this.args = args || [];
        this.options = options || {};
    }
    // Forward a debug message to the caller-supplied debug listener, if any.
    _debug(message) {
        if (this.options.listeners && this.options.listeners.debug) {
            this.options.listeners.debug(message);
        }
    }
    // Build the human-readable command line echoed to the output stream.
    _getCommandString(options, noPrefix) {
        const toolPath = this._getSpawnFileName();
        const args = this._getSpawnArgs(options);
        let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
        if (IS_WINDOWS) {
            // Windows + cmd file
            if (this._isCmdFile()) {
                cmd += toolPath;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows + verbatim
            else if (options.windowsVerbatimArguments) {
                cmd += `"${toolPath}"`;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows (regular)
            else {
                cmd += this._windowsQuoteCmdArg(toolPath);
                for (const a of args) {
                    cmd += ` ${this._windowsQuoteCmdArg(a)}`;
                }
            }
        }
        else {
            // OSX/Linux - this can likely be improved with some form of quoting.
            // creating processes on Unix is fundamentally different than Windows.
            // on Unix, execvp() takes an arg array.
            cmd += toolPath;
            for (const a of args) {
                cmd += ` ${a}`;
            }
        }
        return cmd;
    }
    /**
     * Splits accumulated output into complete lines on os.EOL and invokes
     * `onLine` for each one. Returns the trailing partial line (or '' after an
     * error) so the caller can carry it into the next data chunk.
     *
     * BUGFIX: previously the remaining buffer was assigned to the `strBuffer`
     * parameter (a by-value string) and discarded, so partial lines spanning
     * chunk boundaries were lost. The remainder is now returned to the caller.
     */
    _processLineBuffer(data, strBuffer, onLine) {
        try {
            let s = strBuffer + data.toString();
            let n = s.indexOf(os.EOL);
            while (n > -1) {
                const line = s.substring(0, n);
                onLine(line);
                // the rest of the string ...
                s = s.substring(n + os.EOL.length);
                n = s.indexOf(os.EOL);
            }
            return s;
        }
        catch (err) {
            // streaming lines to console is best effort. Don't fail a build.
            this._debug(`error processing line. Failed with error ${err}`);
            return '';
        }
    }
    // .cmd/.bat files must be launched via the command interpreter.
    _getSpawnFileName() {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                return process.env['COMSPEC'] || 'cmd.exe';
            }
        }
        return this.toolPath;
    }
    // For cmd files the whole command line is collapsed into a single
    // `/D /S /C "..."` argument; otherwise the args pass through unchanged.
    _getSpawnArgs(options) {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
                for (const a of this.args) {
                    argline += ' ';
                    argline += options.windowsVerbatimArguments
                        ? a
                        : this._windowsQuoteCmdArg(a);
                }
                argline += '"';
                return [argline];
            }
        }
        return this.args;
    }
    _endsWith(str, end) {
        return str.endsWith(end);
    }
    // True when the tool is a Windows batch script (.cmd / .bat).
    _isCmdFile() {
        const upperToolPath = this.toolPath.toUpperCase();
        return (this._endsWith(upperToolPath, '.CMD') ||
            this._endsWith(upperToolPath, '.BAT'));
    }
    _windowsQuoteCmdArg(arg) {
        // for .exe, apply the normal quoting rules that libuv applies
        if (!this._isCmdFile()) {
            return this._uvQuoteCmdArg(arg);
        }
        // otherwise apply quoting rules specific to the cmd.exe command line parser.
        // the libuv rules are generic and are not designed specifically for cmd.exe
        // command line parser.
        //
        // for a detailed description of the cmd.exe command line parser, refer to
        // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
        // need quotes for empty arg
        if (!arg) {
            return '""';
        }
        // determine whether the arg needs to be quoted
        const cmdSpecialChars = [
            ' ',
            '\t',
            '&',
            '(',
            ')',
            '[',
            ']',
            '{',
            '}',
            '^',
            '=',
            ';',
            '!',
            "'",
            '+',
            ',',
            '`',
            '~',
            '|',
            '<',
            '>',
            '"'
        ];
        let needsQuotes = false;
        for (const char of arg) {
            if (cmdSpecialChars.some(x => x === char)) {
                needsQuotes = true;
                break;
            }
        }
        // short-circuit if quotes not needed
        if (!needsQuotes) {
            return arg;
        }
        // the following quoting rules are very similar to the rules that by libuv applies.
        //
        // 1) wrap the string in quotes
        //
        // 2) double-up quotes - i.e. " => ""
        //
        // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
        // doesn't work well with a cmd.exe command line.
        //
        // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
        // for example, the command line:
        // foo.exe "myarg:""my val"""
        // is parsed by a .NET console app into an arg array:
        // [ "myarg:\"my val\"" ]
        // which is the same end result when applying libuv quoting rules. although the actual
        // command line from libuv quoting rules would look like:
        // foo.exe "myarg:\"my val\""
        //
        // 3) double-up slashes that precede a quote,
        // e.g. hello \world => "hello \world"
        // hello\"world => "hello\\""world"
        // hello\\"world => "hello\\\\""world"
        // hello world\ => "hello world\\"
        //
        // technically this is not required for a cmd.exe command line, or the batch argument parser.
        // the reasons for including this as a .cmd quoting rule are:
        //
        // a) this is optimized for the scenario where the argument is passed from the .cmd file to an
        // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
        //
        // b) it's what we've been doing previously (by deferring to node default behavior) and we
        // haven't heard any complaints about that aspect.
        //
        // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
        // escaped when used on the command line directly - even though within a .cmd file % can be escaped
        // by using %%.
        //
        // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
        // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
        //
        // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
        // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
        // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
        // to an external program.
        //
        // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
        // % can be escaped within a .cmd file.
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\'; // double the slash
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '"'; // double the quote
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    _uvQuoteCmdArg(arg) {
        // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
        // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
        // is used.
        //
        // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
        // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
        // pasting copyright notice from Node within this function:
        //
        // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
        //
        // Permission is hereby granted, free of charge, to any person obtaining a copy
        // of this software and associated documentation files (the "Software"), to
        // deal in the Software without restriction, including without limitation the
        // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
        // sell copies of the Software, and to permit persons to whom the Software is
        // furnished to do so, subject to the following conditions:
        //
        // The above copyright notice and this permission notice shall be included in
        // all copies or substantial portions of the Software.
        //
        // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
        // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
        // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
        // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
        // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
        // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
        // IN THE SOFTWARE.
        if (!arg) {
            // Need double quotation for empty argument
            return '""';
        }
        if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
            // No quotation needed
            return arg;
        }
        if (!arg.includes('"') && !arg.includes('\\')) {
            // No embedded double quotes or backslashes, so I can just wrap
            // quote marks around the whole thing.
            return `"${arg}"`;
        }
        // Expected input/output:
        // input : hello"world
        // output: "hello\"world"
        // input : hello""world
        // output: "hello\"\"world"
        // input : hello\world
        // output: hello\world
        // input : hello\\world
        // output: hello\\world
        // input : hello\"world
        // output: "hello\\\"world"
        // input : hello\\"world
        // output: "hello\\\\\"world"
        // input : hello world\
        // output: "hello world\\" - note the comment in libuv actually reads "hello world\"
        // but it appears the comment is wrong, it should be "hello world\\"
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\';
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '\\';
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    // Fill in defaults for every exec option so downstream code can assume
    // all fields are present.
    _cloneExecOptions(options) {
        options = options || {};
        const result = {
            cwd: options.cwd || process.cwd(),
            env: options.env || process.env,
            silent: options.silent || false,
            windowsVerbatimArguments: options.windowsVerbatimArguments || false,
            failOnStdErr: options.failOnStdErr || false,
            ignoreReturnCode: options.ignoreReturnCode || false,
            delay: options.delay || 10000
        };
        result.outStream = options.outStream || process.stdout;
        result.errStream = options.errStream || process.stderr;
        return result;
    }
    // Translate exec options into child_process.spawn options.
    _getSpawnOptions(options, toolPath) {
        options = options || {};
        const result = {};
        result.cwd = options.cwd;
        result.env = options.env;
        result['windowsVerbatimArguments'] =
            options.windowsVerbatimArguments || this._isCmdFile();
        if (options.windowsVerbatimArguments) {
            result.argv0 = `"${toolPath}"`;
        }
        return result;
    }
    /**
     * Exec a tool.
     * Output will be streamed to the live console.
     * Returns promise with return code
     *
     * @param tool path to tool to exec
     * @param options optional exec options. See ExecOptions
     * @returns number
     */
    exec() {
        return __awaiter(this, void 0, void 0, function* () {
            // root the tool path if it is unrooted and contains relative pathing
            if (!ioUtil.isRooted(this.toolPath) &&
                (this.toolPath.includes('/') ||
                    (IS_WINDOWS && this.toolPath.includes('\\')))) {
                // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
                this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
            }
            // if the tool is only a file name, then resolve it from the PATH
            // otherwise verify it exists (add extension on Windows if necessary)
            this.toolPath = yield io.which(this.toolPath, true);
            return new Promise((resolve, reject) => {
                this._debug(`exec tool: ${this.toolPath}`);
                this._debug('arguments:');
                for (const arg of this.args) {
                    this._debug(` ${arg}`);
                }
                const optionsNonNull = this._cloneExecOptions(this.options);
                if (!optionsNonNull.silent && optionsNonNull.outStream) {
                    optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
                }
                const state = new ExecState(optionsNonNull, this.toolPath);
                state.on('debug', (message) => {
                    this._debug(message);
                });
                const fileName = this._getSpawnFileName();
                const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
                // BUGFIX: the buffers must be `let` and reassigned from the
                // _processLineBuffer return value; the previous `const` buffers
                // were never updated, so partial lines spanning data chunks were
                // dropped and the trailing-line emits below could never fire.
                let stdbuffer = '';
                if (cp.stdout) {
                    cp.stdout.on('data', (data) => {
                        if (this.options.listeners && this.options.listeners.stdout) {
                            this.options.listeners.stdout(data);
                        }
                        if (!optionsNonNull.silent && optionsNonNull.outStream) {
                            optionsNonNull.outStream.write(data);
                        }
                        stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.stdline) {
                                this.options.listeners.stdline(line);
                            }
                        });
                    });
                }
                let errbuffer = '';
                if (cp.stderr) {
                    cp.stderr.on('data', (data) => {
                        state.processStderr = true;
                        if (this.options.listeners && this.options.listeners.stderr) {
                            this.options.listeners.stderr(data);
                        }
                        if (!optionsNonNull.silent &&
                            optionsNonNull.errStream &&
                            optionsNonNull.outStream) {
                            const s = optionsNonNull.failOnStdErr
                                ? optionsNonNull.errStream
                                : optionsNonNull.outStream;
                            s.write(data);
                        }
                        errbuffer = this._processLineBuffer(data, errbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.errline) {
                                this.options.listeners.errline(line);
                            }
                        });
                    });
                }
                cp.on('error', (err) => {
                    state.processError = err.message;
                    state.processExited = true;
                    state.processClosed = true;
                    state.CheckComplete();
                });
                cp.on('exit', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                cp.on('close', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    state.processClosed = true;
                    this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                state.on('done', (error, exitCode) => {
                    // flush any trailing partial line that never saw an EOL
                    if (stdbuffer.length > 0) {
                        this.emit('stdline', stdbuffer);
                    }
                    if (errbuffer.length > 0) {
                        this.emit('errline', errbuffer);
                    }
                    cp.removeAllListeners();
                    if (error) {
                        reject(error);
                    }
                    else {
                        resolve(exitCode);
                    }
                });
                if (this.options.input) {
                    if (!cp.stdin) {
                        throw new Error('child process missing stdin');
                    }
                    cp.stdin.end(this.options.input);
                }
            });
        });
    }
}
| exports.ToolRunner = ToolRunner; |
| /** |
| * Convert an arg string to an array of args. Handles escaping |
| * |
| * @param argString string of arguments |
| * @returns string[] array of arguments |
| */ |
function argStringToArray(argString) {
    const args = [];
    let inQuotes = false;
    let escaped = false;
    let current = '';
    // Append a character, honoring the rule that a backslash only escapes a
    // double quote; any other escaped character keeps its backslash.
    const appendChar = (ch) => {
        if (escaped && ch !== '"') {
            current += '\\';
        }
        current += ch;
        escaped = false;
    };
    for (const ch of argString) {
        if (ch === '"') {
            if (escaped) {
                appendChar(ch);
            }
            else {
                inQuotes = !inQuotes;
            }
            continue;
        }
        if (ch === '\\' && escaped) {
            appendChar(ch);
            continue;
        }
        if (ch === '\\' && inQuotes) {
            escaped = true;
            continue;
        }
        if (ch === ' ' && !inQuotes) {
            // unquoted whitespace terminates the current argument
            if (current.length > 0) {
                args.push(current);
                current = '';
            }
            continue;
        }
        appendChar(ch);
    }
    if (current.length > 0) {
        args.push(current.trim());
    }
    return args;
}
| exports.argStringToArray = argStringToArray; |
class ExecState extends events.EventEmitter {
    /**
     * Tracks the lifecycle of a spawned process (exit code, stdio close,
     * startup errors) and emits 'done' exactly once when it has settled.
     */
    constructor(options, toolPath) {
        super();
        this.processClosed = false; // tracks whether the process has exited and stdio is closed
        this.processError = '';
        this.processExitCode = 0;
        this.processExited = false; // tracks whether the process has exited
        this.processStderr = false; // tracks whether stderr was written to
        this.delay = 10000; // 10 seconds
        this.done = false;
        this.timeout = null;
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    // Called from every child-process event; decides whether we are finished.
    CheckComplete() {
        if (this.done) {
            return;
        }
        if (this.processClosed) {
            this._setResult();
            return;
        }
        if (this.processExited) {
            // exited but stdio still open — give stragglers `delay` ms
            this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    _debug(message) {
        this.emit('debug', message);
    }
    // Resolve the final outcome, clear the pending timeout and emit 'done'.
    _setResult() {
        let error;
        if (this.processExited) {
            if (this.processError) {
                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        this.done = true;
        this.emit('done', error, this.processExitCode);
    }
    // Fires when stdio never closed after exit; finish the run anyway.
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            const message = `The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
            state._debug(message);
        }
        state._setResult();
    }
}
| //# sourceMappingURL=toolrunner.js.map |
| |
| /***/ }), |
| |
| /***/ 692: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Error subclass used to surface deprecation warnings with a clean stack.
class Deprecation extends Error {
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    this.name = 'Deprecation';

    /* istanbul ignore next */
    if (typeof Error.captureStackTrace === 'function') {
      // Trim the constructor frame from the captured stack (V8 only).
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
| |
| exports.Deprecation = Deprecation; |
| |
| |
| /***/ }), |
| |
| /***/ 697: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| module.exports = (promise, onFinally) => { |
| onFinally = onFinally || (() => {}); |
| |
| return promise.then( |
| val => new Promise(resolve => { |
| resolve(onFinally()); |
| }).then(() => val), |
| err => new Promise(resolve => { |
| resolve(onFinally()); |
| }).then(() => { |
| throw err; |
| }) |
| ); |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 723: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| // JS-YAML's default schema for `safeLoad` function. |
| // It is not described in the YAML specification. |
| // |
| // This schema is based on standard YAML's Core schema and includes most of |
| // extra types described at YAML tag repository. (http://yaml.org/type/) |
| |
| |
| |
| |
| |
var Schema = __webpack_require__(43);


// DEFAULT_SAFE schema: extends an included base schema with extra implicit
// and explicit YAML types. The webpack module ids are opaque here — the
// explicit list presumably covers the YAML tag-repository types
// (binary/omap/pairs/set); TODO(review): confirm ids against the bundle map.
module.exports = new Schema({
  include: [
    __webpack_require__(611)
  ],
  implicit: [
    __webpack_require__(82),
    __webpack_require__(633)
  ],
  explicit: [
    __webpack_require__(913),
    __webpack_require__(181),
    __webpack_require__(947),
    __webpack_require__(100)
  ]
});
| |
| |
| /***/ }), |
| |
| /***/ 740: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| |
// True for the two "absent" values: undefined and null.
function isNothing(subject) {
  return subject === undefined || subject === null;
}
| |
| |
// True for any non-null object (arrays included); typeof null === 'object'
// is excluded explicitly.
function isObject(subject) {
  return subject !== null && typeof subject === 'object';
}
| |
| |
// Normalize a value to an array: arrays pass through unchanged,
// null/undefined become [], anything else is wrapped in a one-element array.
function toArray(sequence) {
  if (Array.isArray(sequence)) return sequence;
  if (sequence === undefined || sequence === null) return [];
  return [ sequence ];
}
| |
| |
// Shallow-copy every own enumerable key of `source` onto `target`;
// a falsy `source` leaves `target` untouched. Returns `target`.
function extend(target, source) {
  if (source) {
    Object.keys(source).forEach(function (key) {
      target[key] = source[key];
    });
  }

  return target;
}
| |
| |
// Concatenate `count` copies of `string`. A while-loop is kept (rather than
// String.prototype.repeat) to preserve the original's behavior for
// non-integer counts, which iterate ceil(count) times.
function repeat(string, count) {
  var result = '';
  var cycle = 0;

  while (cycle < count) {
    result += string;
    cycle += 1;
  }

  return result;
}
| |
| |
// Detect IEEE-754 negative zero: -0 === 0, but 1/-0 === -Infinity.
function isNegativeZero(number) {
  return number === 0 && (1 / number) === Number.NEGATIVE_INFINITY;
}
| |
| |
| module.exports.isNothing = isNothing; |
| module.exports.isObject = isObject; |
| module.exports.toArray = toArray; |
| module.exports.repeat = repeat; |
| module.exports.isNegativeZero = isNegativeZero; |
| module.exports.extend = extend; |
| |
| |
| /***/ }), |
| |
| /***/ 742: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var fs = __webpack_require__(747) |
| var core |
| if (process.platform === 'win32' || global.TESTING_WINDOWS) { |
| core = __webpack_require__(818) |
| } else { |
| core = __webpack_require__(197) |
| } |
| |
| module.exports = isexe |
| isexe.sync = sync |
| |
// Check whether `path` is executable. Callback-style when `cb` is given,
// otherwise returns a Promise. EACCES (or any error with options.ignoreErrors)
// resolves to `false` instead of failing.
function isexe (path, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  if (!cb) {
    if (typeof Promise !== 'function') {
      throw new TypeError('callback not provided')
    }

    // promisify by recursing with a callback
    return new Promise(function (resolve, reject) {
      isexe(path, options || {}, function (er, is) {
        er ? reject(er) : resolve(is)
      })
    })
  }

  core(path, options || {}, function (er, is) {
    // ignore EACCES because that just means we aren't allowed to run it
    if (er && (er.code === 'EACCES' || (options && options.ignoreErrors))) {
      er = null
      is = false
    }
    cb(er, is)
  })
}
| |
// Synchronous variant of isexe: EACCES (or any error when
// options.ignoreErrors is set) maps to `false`; other errors propagate.
function sync (path, options) {
  try {
    return core.sync(path, options || {})
  } catch (er) {
    if ((options && options.ignoreErrors) || er.code === 'EACCES') {
      return false
    }
    throw er
  }
}
| |
| |
| /***/ }), |
| |
| /***/ 747: |
| /***/ (function(module) { |
| |
| module.exports = require("fs"); |
| |
| /***/ }), |
| |
| /***/ 753: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Unwrap a transpiled ES-module object to its default export when present.
function _interopDefault (ex) {
  return ex && typeof ex === 'object' && 'default' in ex ? ex['default'] : ex;
}
| |
| var endpoint = __webpack_require__(385); |
| var universalUserAgent = __webpack_require__(796); |
| var isPlainObject = _interopDefault(__webpack_require__(548)); |
| var nodeFetch = _interopDefault(__webpack_require__(369)); |
| var requestError = __webpack_require__(463); |
| |
| const VERSION = "5.4.2"; |
| |
// Read a fetch Response body as a raw ArrayBuffer (used for binary payloads
// whose content-type is neither JSON nor text).
function getBufferResponse(response) {
  return response.arrayBuffer();
}
| |
// Perform one HTTP request for octokit: serializes the body, dispatches via
// the injected fetch (or node-fetch), normalizes headers, maps GitHub API
// status conventions to results or RequestError, and decodes the body by
// content-type. Returns a promise for { status, url, headers, data }.
function fetchWrapper(requestOptions) {
  // plain objects / arrays are sent as JSON
  if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  let headers = {};
  let status;
  let url;
  // allow callers to inject a custom fetch implementation for testing
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    // flatten the Headers iterable into a plain object
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    // 204 No Content / 205 Reset Content carry no body
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests


    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    // 4xx/5xx: read the body and enrich the error with any JSON payload
    if (status >= 400) {
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format

          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    // success: decode by content-type (JSON, text, else raw buffer)
    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // re-throw RequestErrors untouched; wrap anything else (network, parse)
    if (error instanceof requestError.RequestError) {
      throw error;
    }

    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
| |
// Build a request function bound to `newDefaults` layered over `oldEndpoint`.
// The returned function also carries `.endpoint` and `.defaults` so further
// layers can be stacked.
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = (route, parameters) => {
    const endpointOptions = endpoint.merge(route, parameters);

    // no request hook configured: dispatch directly
    if (!(endpointOptions.request && endpointOptions.request.hook)) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    // hand the hook a minimal request function it can invoke/augment
    const request = (innerRoute, innerParameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(innerRoute, innerParameters)));
    };

    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });

    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}
| |
// Default `request` instance: octokit endpoint defaults plus a user-agent
// header identifying this library version and the runtime environment.
const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});

exports.request = request;
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 761: |
| /***/ (function(module) { |
| |
| module.exports = require("zlib"); |
| |
| /***/ }), |
| |
| /***/ 763: |
| /***/ (function(module) { |
| |
| module.exports = removeHook |
| |
// Remove the first registration for `method` under hook `name`; no-op when
// the hook or the method is not registered.
function removeHook (state, name, method) {
  const hooks = state.registry[name]
  if (!hooks) {
    return
  }

  const index = hooks.findIndex(function (registered) {
    return registered.orig === method
  })

  if (index === -1) {
    return
  }

  hooks.splice(index, 1)
}
| |
| |
| /***/ }), |
| |
| /***/ 768: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| module.exports = function (x) { |
| var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); |
| var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); |
| |
| if (x[x.length - 1] === lf) { |
| x = x.slice(0, x.length - 1); |
| } |
| |
| if (x[x.length - 1] === cr) { |
| x = x.slice(0, x.length - 1); |
| } |
| |
| return x; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 777: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
module.exports = getFirstPage

const getPage = __webpack_require__(265)

// Fetch the page referenced by the `first` relation of a paginated
// response's Link header.
function getFirstPage (octokit, link, headers) {
  return getPage(octokit, link, 'first', headers)
}
| |
| |
| /***/ }), |
| |
| /***/ 796: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } |
| |
| var osName = _interopDefault(__webpack_require__(2)); |
| |
/**
 * Build a "Node.js/<version> (<os>; <arch>)" user-agent string.
 * `slice(1)` drops the leading "v" from process.version (replaces the
 * deprecated String.prototype.substr).
 * Falls back to coarse strings when the OS name cannot be resolved.
 */
function getUserAgent() {
  try {
    return `Node.js/${process.version.slice(1)} (${osName()}; ${process.arch})`;
  } catch (error) {
    // os-name shells out to `wmic` on some Windows versions; if that fails
    // we can at least report the platform family.
    if (/wmic os get Caption/.test(error.message)) {
      return "Windows <version undetectable>";
    }

    return "<environment undetectable>";
  }
}
| |
| exports.getUserAgent = getUserAgent; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 806: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const constants = __webpack_require__(199); |
| const utils = __webpack_require__(224); |
| |
| /** |
| * Constants |
| */ |
| |
| const { |
| MAX_LENGTH, |
| POSIX_REGEX_SOURCE, |
| REGEX_NON_SPECIAL_CHARS, |
| REGEX_SPECIAL_CHARS_BACKREF, |
| REPLACEMENTS |
| } = constants; |
| |
| /** |
| * Helpers |
| */ |
| |
// Expand a brace range (e.g. ['a', 'z']) into a regex character class,
// deferring to a caller-supplied `options.expandRange` when provided.
const expandRange = (args, options) => {
  if (typeof options.expandRange === 'function') {
    return options.expandRange(...args, options);
  }

  // NOTE(review): default lexicographic sort, and it mutates the caller's
  // array — appears intentional for single-character endpoints.
  args.sort();
  const value = `[${args.join('-')}]`;

  try {
    /* eslint-disable-next-line no-new */
    new RegExp(value);
  } catch (ex) {
    // invalid class (e.g. reversed range): fall back to escaped literals
    return args.map(v => utils.escapeRegex(v)).join('..');
  }

  return value;
};
| |
| /** |
| * Create the message for a syntax error |
| */ |
| |
// Build the message for a bracket/paren/brace syntax error, telling the user
// how to escape the offending character to match it literally.
const syntaxError = (type, char) => {
  const escaped = `\\\\${char}`;
  return `Missing ${type}: "${char}" - use "${escaped}" to match literal characters`;
};
| |
| /** |
| * Parse the given input string. |
| * @param {String} input |
| * @param {Object} options |
| * @return {Object} |
| */ |
| |
const parse = (input, options) => {
  if (typeof input !== 'string') {
    throw new TypeError('Expected a string');
  }

  // Swap the pattern for a predefined canonical replacement when one exists
  // in the REPLACEMENTS map.
  input = REPLACEMENTS[input] || input;

  const opts = { ...options };
  const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;

  let len = input.length;
  if (len > max) {
    throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
  }

  // Beginning-of-string sentinel token; opts.prepend seeds the output.
  const bos = { type: 'bos', value: '', output: opts.prepend || '' };
  const tokens = [bos];

  const capture = opts.capture ? '' : '?:';
  const win32 = utils.isWindows(options);

  // create constants based on platform, for windows or posix
  const PLATFORM_CHARS = constants.globChars(win32);
  const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);

  const {
    DOT_LITERAL,
    PLUS_LITERAL,
    SLASH_LITERAL,
    ONE_CHAR,
    DOTS_SLASH,
    NO_DOT,
    NO_DOT_SLASH,
    NO_DOTS_SLASH,
    QMARK,
    QMARK_NO_DOT,
    STAR,
    START_ANCHOR
  } = PLATFORM_CHARS;

  // Regex source for a '**' (globstar) segment.
  const globstar = (opts) => {
    return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
  };

  const nodot = opts.dot ? '' : NO_DOT;
  const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
  let star = opts.bash === true ? globstar(opts) : STAR;

  if (opts.capture) {
    star = `(${star})`;
  }

  // minimatch options support
  if (typeof opts.noext === 'boolean') {
    opts.noextglob = opts.noext;
  }

  // Mutable parser state; `output` accumulates the regex source and `tokens`
  // the token stream. The counters track open brackets/braces/parens/quotes.
  const state = {
    input,
    index: -1,
    start: 0,
    dot: opts.dot === true,
    consumed: '',
    output: '',
    prefix: '',
    backtrack: false,
    negated: false,
    brackets: 0,
    braces: 0,
    parens: 0,
    quotes: 0,
    globstar: false,
    tokens
  };

  input = utils.removePrefix(input, state);
  len = input.length;

  const extglobs = [];
  const braces = [];
  const stack = [];
  let prev = bos;
  let value;

  /**
   * Tokenizing helpers
   */

  const eos = () => state.index === len - 1;
  const peek = state.peek = (n = 1) => input[state.index + n];
  const advance = state.advance = () => input[++state.index];
  const remaining = () => input.slice(state.index + 1);
  const consume = (value = '', num = 0) => {
    state.consumed += value;
    state.index += num;
  };
  const append = token => {
    state.output += token.output != null ? token.output : token.value;
    consume(token.value);
  };

  // Consume leading '!' negation chars; an even count cancels itself out.
  const negate = () => {
    let count = 1;

    while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
      advance();
      state.start++;
      count++;
    }

    if (count % 2 === 0) {
      return false;
    }

    state.negated = true;
    state.start++;
    return true;
  };

  // Track nesting depth: bump the per-type counter and remember the order of
  // openings on a shared stack so closings can be matched.
  const increment = type => {
    state[type]++;
    stack.push(type);
  };

  const decrement = type => {
    state[type]--;
    stack.pop();
  };

  /**
   * Push tokens onto the tokens array. This helper speeds up
   * tokenizing by 1) helping us avoid backtracking as much as possible,
   * and 2) helping us avoid creating extra tokens when consecutive
   * characters are plain text. This improves performance and simplifies
   * lookbehinds.
   */

  const push = tok => {
    // A globstar followed by anything other than a slash/paren/brace-comma
    // or extglob char must be downgraded to a plain star.
    if (prev.type === 'globstar') {
      const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
      const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));

      if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
        state.output = state.output.slice(0, -prev.output.length);
        prev.type = 'star';
        prev.value = '*';
        prev.output = star;
        state.output += prev.output;
      }
    }

    if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) {
      extglobs[extglobs.length - 1].inner += tok.value;
    }

    if (tok.value || tok.output) append(tok);
    // Merge consecutive plain-text tokens instead of creating a new one.
    if (prev && prev.type === 'text' && tok.type === 'text') {
      prev.value += tok.value;
      prev.output = (prev.output || '') + tok.value;
      return;
    }

    tok.prev = prev;
    tokens.push(tok);
    prev = tok;
  };

  // Open an extglob group (e.g. '!(', '+('), tracking it on the extglobs
  // stack until its matching ')' is seen.
  const extglobOpen = (type, value) => {
    const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };

    token.prev = prev;
    token.parens = state.parens;
    token.output = state.output;
    const output = (opts.capture ? '(' : '') + token.open;

    increment('parens');
    push({ type, value, output: state.output ? '' : ONE_CHAR });
    push({ type: 'paren', extglob: true, value: advance(), output });
    extglobs.push(token);
  };

  // Close the innermost extglob group; 'negate' groups need special output
  // so that the negative lookahead is followed by a consuming pattern.
  const extglobClose = token => {
    let output = token.close + (opts.capture ? ')' : '');

    if (token.type === 'negate') {
      let extglobStar = star;

      if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
        extglobStar = globstar(opts);
      }

      if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
        output = token.close = `)$))${extglobStar}`;
      }

      if (token.prev.type === 'bos' && eos()) {
        state.negatedExtglob = true;
      }
    }

    push({ type: 'paren', extglob: true, value, output });
    decrement('parens');
  };

  /**
   * Fast paths
   */

  // Patterns without special leading chars or grouping syntax can be
  // converted with a single regex replace instead of full tokenization.
  if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
    let backslashes = false;

    let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
      if (first === '\\') {
        backslashes = true;
        return m;
      }

      if (first === '?') {
        if (esc) {
          return esc + first + (rest ? QMARK.repeat(rest.length) : '');
        }
        if (index === 0) {
          return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
        }
        return QMARK.repeat(chars.length);
      }

      if (first === '.') {
        return DOT_LITERAL.repeat(chars.length);
      }

      if (first === '*') {
        if (esc) {
          return esc + first + (rest ? star : '');
        }
        return star;
      }
      return esc ? m : `\\${m}`;
    });

    if (backslashes === true) {
      if (opts.unescape === true) {
        output = output.replace(/\\/g, '');
      } else {
        // Collapse runs of backslashes to at most one escape.
        output = output.replace(/\\+/g, m => {
          return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
        });
      }
    }

    if (output === input && opts.contains === true) {
      state.output = input;
      return state;
    }

    state.output = utils.wrapOutput(output, state, options);
    return state;
  }

  /**
   * Tokenize input until we reach end-of-string
   */

  while (!eos()) {
    value = advance();

    if (value === '\u0000') {
      continue;
    }

    /**
     * Escaped characters
     */

    if (value === '\\') {
      const next = peek();

      if (next === '/' && opts.bash !== true) {
        continue;
      }

      if (next === '.' || next === ';') {
        continue;
      }

      if (!next) {
        value += '\\';
        push({ type: 'text', value });
        continue;
      }

      // collapse slashes to reduce potential for exploits
      const match = /^\\+/.exec(remaining());
      let slashes = 0;

      if (match && match[0].length > 2) {
        slashes = match[0].length;
        state.index += slashes;
        if (slashes % 2 !== 0) {
          value += '\\';
        }
      }

      if (opts.unescape === true) {
        value = advance() || '';
      } else {
        value += advance() || '';
      }

      if (state.brackets === 0) {
        push({ type: 'text', value });
        continue;
      }
    }

    /**
     * If we're inside a regex character class, continue
     * until we reach the closing bracket.
     */

    if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
      // Translate POSIX classes like [[:alpha:]] when enabled.
      if (opts.posix !== false && value === ':') {
        const inner = prev.value.slice(1);
        if (inner.includes('[')) {
          prev.posix = true;

          if (inner.includes(':')) {
            const idx = prev.value.lastIndexOf('[');
            const pre = prev.value.slice(0, idx);
            const rest = prev.value.slice(idx + 2);
            const posix = POSIX_REGEX_SOURCE[rest];
            if (posix) {
              prev.value = pre + posix;
              state.backtrack = true;
              advance();

              if (!bos.output && tokens.indexOf(prev) === 1) {
                bos.output = ONE_CHAR;
              }
              continue;
            }
          }
        }
      }

      if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
        value = `\\${value}`;
      }

      if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
        value = `\\${value}`;
      }

      if (opts.posix === true && value === '!' && prev.value === '[') {
        value = '^';
      }

      prev.value += value;
      append({ value });
      continue;
    }

    /**
     * If we're inside a quoted string, continue
     * until we reach the closing double quote.
     */

    if (state.quotes === 1 && value !== '"') {
      value = utils.escapeRegex(value);
      prev.value += value;
      append({ value });
      continue;
    }

    /**
     * Double quotes
     */

    if (value === '"') {
      state.quotes = state.quotes === 1 ? 0 : 1;
      if (opts.keepQuotes === true) {
        push({ type: 'text', value });
      }
      continue;
    }

    /**
     * Parentheses
     */

    if (value === '(') {
      increment('parens');
      push({ type: 'paren', value });
      continue;
    }

    if (value === ')') {
      if (state.parens === 0 && opts.strictBrackets === true) {
        throw new SyntaxError(syntaxError('opening', '('));
      }

      const extglob = extglobs[extglobs.length - 1];
      if (extglob && state.parens === extglob.parens + 1) {
        extglobClose(extglobs.pop());
        continue;
      }

      push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
      decrement('parens');
      continue;
    }

    /**
     * Square brackets
     */

    if (value === '[') {
      // A '[' with no matching ']' is treated as a literal bracket.
      if (opts.nobracket === true || !remaining().includes(']')) {
        if (opts.nobracket !== true && opts.strictBrackets === true) {
          throw new SyntaxError(syntaxError('closing', ']'));
        }

        value = `\\${value}`;
      } else {
        increment('brackets');
      }

      push({ type: 'bracket', value });
      continue;
    }

    if (value === ']') {
      if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
        push({ type: 'text', value, output: `\\${value}` });
        continue;
      }

      if (state.brackets === 0) {
        if (opts.strictBrackets === true) {
          throw new SyntaxError(syntaxError('opening', '['));
        }

        push({ type: 'text', value, output: `\\${value}` });
        continue;
      }

      decrement('brackets');

      const prevValue = prev.value.slice(1);
      if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
        value = `/${value}`;
      }

      prev.value += value;
      append({ value });

      // when literal brackets are explicitly disabled
      // assume we should match with a regex character class
      if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {
        continue;
      }

      const escaped = utils.escapeRegex(prev.value);
      state.output = state.output.slice(0, -prev.value.length);

      // when literal brackets are explicitly enabled
      // assume we should escape the brackets to match literal characters
      if (opts.literalBrackets === true) {
        state.output += escaped;
        prev.value = escaped;
        continue;
      }

      // when the user specifies nothing, try to match both
      prev.value = `(${capture}${escaped}|${prev.value})`;
      state.output += prev.value;
      continue;
    }

    /**
     * Braces
     */

    if (value === '{' && opts.nobrace !== true) {
      increment('braces');

      // Record where this brace group starts so it can be rewound if it
      // turns out not to be a real brace expansion.
      const open = {
        type: 'brace',
        value,
        output: '(',
        outputIndex: state.output.length,
        tokensIndex: state.tokens.length
      };

      braces.push(open);
      push(open);
      continue;
    }

    if (value === '}') {
      const brace = braces[braces.length - 1];

      if (opts.nobrace === true || !brace) {
        push({ type: 'text', value, output: value });
        continue;
      }

      let output = ')';

      // A '..' range inside the braces: collect the endpoints and expand.
      if (brace.dots === true) {
        const arr = tokens.slice();
        const range = [];

        for (let i = arr.length - 1; i >= 0; i--) {
          tokens.pop();
          if (arr[i].type === 'brace') {
            break;
          }
          if (arr[i].type !== 'dots') {
            range.unshift(arr[i].value);
          }
        }

        output = expandRange(range, opts);
        state.backtrack = true;
      }

      // Neither commas nor dots inside: treat the braces as literals and
      // rebuild the output from the recorded checkpoint.
      if (brace.comma !== true && brace.dots !== true) {
        const out = state.output.slice(0, brace.outputIndex);
        const toks = state.tokens.slice(brace.tokensIndex);
        brace.value = brace.output = '\\{';
        value = output = '\\}';
        state.output = out;
        for (const t of toks) {
          state.output += (t.output || t.value);
        }
      }

      push({ type: 'brace', value, output });
      decrement('braces');
      braces.pop();
      continue;
    }

    /**
     * Pipes
     */

    if (value === '|') {
      if (extglobs.length > 0) {
        extglobs[extglobs.length - 1].conditions++;
      }
      push({ type: 'text', value });
      continue;
    }

    /**
     * Commas
     */

    if (value === ',') {
      let output = value;

      // Inside an (innermost) brace group a comma becomes an alternation.
      const brace = braces[braces.length - 1];
      if (brace && stack[stack.length - 1] === 'braces') {
        brace.comma = true;
        output = '|';
      }

      push({ type: 'comma', value, output });
      continue;
    }

    /**
     * Slashes
     */

    if (value === '/') {
      // if the beginning of the glob is "./", advance the start
      // to the current index, and don't add the "./" characters
      // to the state. This greatly simplifies lookbehinds when
      // checking for BOS characters like "!" and "." (not "./")
      if (prev.type === 'dot' && state.index === state.start + 1) {
        state.start = state.index + 1;
        state.consumed = '';
        state.output = '';
        tokens.pop();
        prev = bos; // reset "prev" to the first token
        continue;
      }

      push({ type: 'slash', value, output: SLASH_LITERAL });
      continue;
    }

    /**
     * Dots
     */

    if (value === '.') {
      // Consecutive dots inside braces form a range operator ('..').
      if (state.braces > 0 && prev.type === 'dot') {
        if (prev.value === '.') prev.output = DOT_LITERAL;
        const brace = braces[braces.length - 1];
        prev.type = 'dots';
        prev.output += value;
        prev.value += value;
        brace.dots = true;
        continue;
      }

      if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
        push({ type: 'text', value, output: DOT_LITERAL });
        continue;
      }

      push({ type: 'dot', value, output: DOT_LITERAL });
      continue;
    }

    /**
     * Question marks
     */

    if (value === '?') {
      const isGroup = prev && prev.value === '(';
      if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
        extglobOpen('qmark', value);
        continue;
      }

      // '?' directly after '(' is regex syntax (non-capture/lookaround),
      // not a glob qmark.
      if (prev && prev.type === 'paren') {
        const next = peek();
        let output = value;

        if (next === '<' && !utils.supportsLookbehinds()) {
          throw new Error('Node.js v10 or higher is required for regex lookbehinds');
        }

        if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
          output = `\\${value}`;
        }

        push({ type: 'text', value, output });
        continue;
      }

      if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
        push({ type: 'qmark', value, output: QMARK_NO_DOT });
        continue;
      }

      push({ type: 'qmark', value, output: QMARK });
      continue;
    }

    /**
     * Exclamation
     */

    if (value === '!') {
      if (opts.noextglob !== true && peek() === '(') {
        if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
          extglobOpen('negate', value);
          continue;
        }
      }

      if (opts.nonegate !== true && state.index === 0) {
        negate();
        continue;
      }
    }

    /**
     * Plus
     */

    if (value === '+') {
      if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
        extglobOpen('plus', value);
        continue;
      }

      if ((prev && prev.value === '(') || opts.regex === false) {
        push({ type: 'plus', value, output: PLUS_LITERAL });
        continue;
      }

      if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
        push({ type: 'plus', value });
        continue;
      }

      push({ type: 'plus', value: PLUS_LITERAL });
      continue;
    }

    /**
     * Plain text
     */

    if (value === '@') {
      if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
        push({ type: 'at', extglob: true, value, output: '' });
        continue;
      }

      push({ type: 'text', value });
      continue;
    }

    /**
     * Plain text
     */

    if (value !== '*') {
      if (value === '$' || value === '^') {
        value = `\\${value}`;
      }

      // Greedily absorb a run of non-special characters in one token.
      const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
      if (match) {
        value += match[0];
        state.index += match[0].length;
      }

      push({ type: 'text', value });
      continue;
    }

    /**
     * Stars
     */

    // A star following a star/globstar merges into that token.
    if (prev && (prev.type === 'globstar' || prev.star === true)) {
      prev.type = 'star';
      prev.star = true;
      prev.value += value;
      prev.output = star;
      state.backtrack = true;
      state.globstar = true;
      consume(value);
      continue;
    }

    let rest = remaining();
    if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
      extglobOpen('star', value);
      continue;
    }

    if (prev.type === 'star') {
      if (opts.noglobstar === true) {
        consume(value);
        continue;
      }

      const prior = prev.prev;
      const before = prior.prev;
      const isStart = prior.type === 'slash' || prior.type === 'bos';
      const afterStar = before && (before.type === 'star' || before.type === 'globstar');

      if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
        push({ type: 'star', value, output: '' });
        continue;
      }

      const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
      const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
      if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
        push({ type: 'star', value, output: '' });
        continue;
      }

      // strip consecutive `/**/`
      while (rest.slice(0, 3) === '/**') {
        const after = input[state.index + 4];
        if (after && after !== '/') {
          break;
        }
        rest = rest.slice(3);
        consume('/**', 3);
      }

      if (prior.type === 'bos' && eos()) {
        prev.type = 'globstar';
        prev.value += value;
        prev.output = globstar(opts);
        state.output = prev.output;
        state.globstar = true;
        consume(value);
        continue;
      }

      if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
        state.output = state.output.slice(0, -(prior.output + prev.output).length);
        prior.output = `(?:${prior.output}`;

        prev.type = 'globstar';
        prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
        prev.value += value;
        state.globstar = true;
        state.output += prior.output + prev.output;
        consume(value);
        continue;
      }

      if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
        const end = rest[1] !== void 0 ? '|$' : '';

        state.output = state.output.slice(0, -(prior.output + prev.output).length);
        prior.output = `(?:${prior.output}`;

        prev.type = 'globstar';
        prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
        prev.value += value;

        state.output += prior.output + prev.output;
        state.globstar = true;

        consume(value + advance());

        push({ type: 'slash', value: '/', output: '' });
        continue;
      }

      if (prior.type === 'bos' && rest[0] === '/') {
        prev.type = 'globstar';
        prev.value += value;
        prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
        state.output = prev.output;
        state.globstar = true;
        consume(value + advance());
        push({ type: 'slash', value: '/', output: '' });
        continue;
      }

      // remove single star from output
      state.output = state.output.slice(0, -prev.output.length);

      // reset previous token to globstar
      prev.type = 'globstar';
      prev.output = globstar(opts);
      prev.value += value;

      // reset output with globstar
      state.output += prev.output;
      state.globstar = true;
      consume(value);
      continue;
    }

    const token = { type: 'star', value, output: star };

    if (opts.bash === true) {
      token.output = '.*?';
      if (prev.type === 'bos' || prev.type === 'slash') {
        token.output = nodot + token.output;
      }
      push(token);
      continue;
    }

    if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
      token.output = value;
      push(token);
      continue;
    }

    // A star at the start of a segment must not match leading dots unless
    // opts.dot allows it.
    if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
      if (prev.type === 'dot') {
        state.output += NO_DOT_SLASH;
        prev.output += NO_DOT_SLASH;

      } else if (opts.dot === true) {
        state.output += NO_DOTS_SLASH;
        prev.output += NO_DOTS_SLASH;

      } else {
        state.output += nodot;
        prev.output += nodot;
      }

      if (peek() !== '*') {
        state.output += ONE_CHAR;
        prev.output += ONE_CHAR;
      }
    }

    push(token);
  }

  // Escape any brackets/parens/braces left unclosed at end of input
  // (or throw in strict mode).
  while (state.brackets > 0) {
    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
    state.output = utils.escapeLast(state.output, '[');
    decrement('brackets');
  }

  while (state.parens > 0) {
    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
    state.output = utils.escapeLast(state.output, '(');
    decrement('parens');
  }

  while (state.braces > 0) {
    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
    state.output = utils.escapeLast(state.output, '{');
    decrement('braces');
  }

  if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
    push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
  }

  // rebuild the output if we had to backtrack at any point
  if (state.backtrack === true) {
    state.output = '';

    for (const token of state.tokens) {
      state.output += token.output != null ? token.output : token.value;

      if (token.suffix) {
        state.output += token.suffix;
      }
    }
  }

  return state;
};
| |
| /** |
| * Fast paths for creating regular expressions for common glob patterns. |
| * This can significantly speed up processing and has very little downside |
| * impact when none of the fast paths match. |
| */ |
| |
parse.fastpaths = (input, options) => {
  const opts = { ...options };
  const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
  const len = input.length;
  if (len > max) {
    throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
  }

  // Swap the pattern for a predefined canonical replacement when one exists.
  input = REPLACEMENTS[input] || input;
  const win32 = utils.isWindows(options);

  // create constants based on platform, for windows or posix
  const {
    DOT_LITERAL,
    SLASH_LITERAL,
    ONE_CHAR,
    DOTS_SLASH,
    NO_DOT,
    NO_DOTS,
    NO_DOTS_SLASH,
    STAR,
    START_ANCHOR
  } = constants.globChars(win32);

  const nodot = opts.dot ? NO_DOTS : NO_DOT;
  const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
  const capture = opts.capture ? '' : '?:';
  const state = { negated: false, prefix: '' };
  let star = opts.bash === true ? '.*?' : STAR;

  if (opts.capture) {
    star = `(${star})`;
  }

  // Regex source for a '**' (globstar) segment; degrades to a plain star
  // when globstar support is disabled.
  const globstar = (opts) => {
    if (opts.noglobstar === true) return star;
    return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
  };

  // Map a handful of very common glob shapes directly to regex source.
  // Returns undefined when the pattern is not one of the known shapes,
  // signalling the caller to fall back to the full parser.
  const create = str => {
    switch (str) {
      case '*':
        return `${nodot}${ONE_CHAR}${star}`;

      case '.*':
        return `${DOT_LITERAL}${ONE_CHAR}${star}`;

      case '*.*':
        return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;

      case '*/*':
        return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;

      case '**':
        return nodot + globstar(opts);

      case '**/*':
        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;

      case '**/*.*':
        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;

      case '**/.*':
        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;

      default: {
        // "<stem>.<ext>": recurse on the stem, then append the literal
        // dot-extension suffix.
        const match = /^(.*?)\.(\w+)$/.exec(str);
        if (!match) return;

        const source = create(match[1]);
        if (!source) return;

        return source + DOT_LITERAL + match[2];
      }
    }
  };

  const output = utils.removePrefix(input, state);
  let source = create(output);

  // Allow an optional trailing slash unless strict slash matching is on.
  if (source && opts.strictSlashes !== true) {
    source += `${SLASH_LITERAL}?`;
  }

  return source;
};
| |
| module.exports = parse; |
| |
| |
| /***/ }), |
| |
| /***/ 807: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const exec_1 = __webpack_require__(986); |
| // Wraps original exec() function |
| // Returns exit code and whole stdout/stderr |
/**
 * Run a command and capture its exit code plus the complete stdout/stderr.
 *
 * @param {string} commandLine command (and inline arguments) to execute
 * @param {string[]} [args] additional arguments for the command
 * @param {object} [options] exec options; the caller's object is NOT mutated
 * @returns {Promise<{code: number, stdout: string, stderr: string}>}
 */
async function exec(commandLine, args, options) {
    let stdout = '';
    let stderr = '';
    // Shallow-copy so the caller's options object is not mutated, and chain
    // any listeners the caller supplied instead of silently discarding them
    // (the previous implementation overwrote options.listeners in place).
    const opts = Object.assign({}, options);
    const userListeners = opts.listeners || {};
    opts.listeners = Object.assign({}, userListeners, {
        stdout: (data) => {
            stdout += data.toString();
            if (typeof userListeners.stdout === 'function') {
                userListeners.stdout(data);
            }
        },
        stderr: (data) => {
            stderr += data.toString();
            if (typeof userListeners.stderr === 'function') {
                userListeners.stderr(data);
            }
        }
    });
    const code = await exec_1.exec(commandLine, args, opts);
    return { code, stdout, stderr };
}
| exports.default = exec; |
| |
| |
| /***/ }), |
| |
| /***/ 809: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// Decide whether a YAML scalar resolves to null: an absent value (null),
// the single character "~", or one of the spellings "null"/"Null"/"NULL".
function resolveYamlNull(data) {
  if (data === null) return true;

  var len = data.length;

  if (len === 1) return data === '~';
  if (len === 4) return data === 'null' || data === 'Null' || data === 'NULL';
  return false;
}
| |
// Construct the JS value for a YAML null node: always the `null` primitive,
// regardless of which spelling appeared in the source document.
function constructYamlNull() {
  return null;
}
| |
// Predicate used by the dumper: does this JS value serialize as YAML null?
// (Only the `null` primitive qualifies; `undefined` does not.)
function isNull(object) {
  return Object.is(object, null);
}
| |
// YAML schema type for the standard null tag. Wires the resolver,
// constructor, and dumper predicate together, and registers one
// representation per output style (canonical "~", lowercase default, etc.).
module.exports = new Type('tag:yaml.org,2002:null', {
  kind: 'scalar',
  resolve: resolveYamlNull,
  construct: constructYamlNull,
  predicate: isNull,
  represent: {
    canonical: function () { return '~'; },
    lowercase: function () { return 'null'; },
    uppercase: function () { return 'NULL'; },
    camelcase: function () { return 'Null'; }
  },
  defaultStyle: 'lowercase'
});
| |
| |
| /***/ }), |
| |
| /***/ 813: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
/**
 * Classify a token and wrap it in the auth-result shape.
 * Three dot-separated segments -> JWT ("app"); a "v<digits>." prefix ->
 * "installation" token; anything else -> "oauth".
 */
async function auth(token) {
  let tokenType;
  if (token.split(/\./).length === 3) {
    tokenType = "app";
  } else if (/^v\d+\./.test(token)) {
    tokenType = "installation";
  } else {
    tokenType = "oauth";
  }

  return {
    type: "token",
    token: token,
    tokenType
  };
}
| |
| /** |
| * Prefix token for usage in the Authorization header |
| * |
| * @param token OAuth token or JSON Web Token |
| */ |
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  // JWTs (three dot-separated segments) use the "bearer" scheme;
  // everything else uses the "token" scheme.
  const isJsonWebToken = token.split(/\./).length === 3;
  return isJsonWebToken ? `bearer ${token}` : `token ${token}`;
}
| |
// Request hook: resolve route + parameters into a concrete endpoint,
// attach the Authorization header for `token`, then dispatch the request.
async function hook(token, request, route, parameters) {
  const requestOptions = request.endpoint.merge(route, parameters);
  requestOptions.headers.authorization = withAuthorizationPrefix(token);
  return request(requestOptions);
}
| |
// Factory for a token-based auth strategy. The returned callable resolves
// the auth result; its `.hook` property signs individual requests.
const createTokenAuth = function createTokenAuth(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }

  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }

  // Accept tokens pasted with their header scheme ("token ..."/"bearer ...")
  // by stripping the prefix before binding.
  const normalizedToken = token.replace(/^(token|bearer) +/i, "");

  const authenticate = auth.bind(null, normalizedToken);
  authenticate.hook = hook.bind(null, normalizedToken);
  return authenticate;
};
| |
| exports.createTokenAuth = createTokenAuth; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 814: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = which |
| which.sync = whichSync |
| |
| var isWindows = process.platform === 'win32' || |
| process.env.OSTYPE === 'cygwin' || |
| process.env.OSTYPE === 'msys' |
| |
| var path = __webpack_require__(622) |
| var COLON = isWindows ? ';' : ':' |
| var isexe = __webpack_require__(742) |
| |
// Build the ENOENT error reported when a command cannot be located on PATH.
function getNotFoundError (cmd) {
  const error = new Error('not found: ' + cmd)
  error.code = 'ENOENT'
  return error
}
| |
// Compute the search plan for a command: the list of directories to scan
// (`env`), the candidate extensions (`ext`), and the raw PATHEXT string
// passed through to isexe (`extExe`).
function getPathInfo (cmd, opt) {
  const colon = opt.colon || COLON
  let pathEnv = (opt.path || process.env.PATH || '').split(colon)

  let pathExt = ['']
  let pathExtExe = ''
  if (isWindows) {
    // Windows searches the current directory first and derives executable
    // extensions from PATHEXT.
    pathEnv.unshift(process.cwd())
    pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
    pathExt = pathExtExe.split(colon)


    // Always test the cmd itself first. isexe will check to make sure
    // it's found in the pathExt set.
    if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
      pathExt.unshift('')
  }

  // If it has a slash, then we don't bother searching the pathenv.
  // just check the file itself, and that's it.
  if (cmd.match(/\//) || (isWindows && cmd.match(/\\/)))
    pathEnv = ['']

  return {
    env: pathEnv,
    ext: pathExt,
    extExe: pathExtExe
  }
}
| |
function which (cmd, opt, cb) {
  // Support which(cmd, cb): the options argument is optional.
  if (typeof opt === 'function') {
    cb = opt
    opt = {}
  }

  var info = getPathInfo(cmd, opt)
  var pathEnv = info.env
  var pathExt = info.ext
  var pathExtExe = info.extExe
  var found = []

  // F walks the PATH entries (outer); E walks the candidate extensions for
  // one entry (inner). Both recurse via callbacks instead of looping so the
  // async isexe() checks run strictly one at a time, in order.
  ;(function F (i, l) {
    if (i === l) {
      // PATH exhausted: return all matches in "all" mode, otherwise ENOENT.
      if (opt.all && found.length)
        return cb(null, found)
      else
        return cb(getNotFoundError(cmd))
    }

    var pathPart = pathEnv[i]
    // Windows PATH entries may be wrapped in double quotes; strip them.
    if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
      pathPart = pathPart.slice(1, -1)

    var p = path.join(pathPart, cmd)
    // Re-attach an explicit "./" or ".\" prefix that path.join() strips.
    if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
      p = cmd.slice(0, 2) + p
    }
    ;(function E (ii, ll) {
      if (ii === ll) return F(i + 1, l)
      var ext = pathExt[ii]
      isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
        if (!er && is) {
          if (opt.all)
            found.push(p + ext)
          else
            return cb(null, p + ext)
        }
        return E(ii + 1, ll)
      })
    })(0, pathExt.length)
  })(0, pathEnv.length)
}
| |
// Synchronous variant of which(): scan each PATH entry and candidate
// extension in order, returning the first executable match (or all matches
// with opt.all). Throws ENOENT when nothing is found unless opt.nothrow.
function whichSync (cmd, opt) {
  opt = opt || {}

  const info = getPathInfo(cmd, opt)
  const searchDirs = info.env
  const extensions = info.ext
  const rawPathExt = info.extExe
  const matches = []

  for (let dir of searchDirs) {
    // Windows PATH entries may be wrapped in double quotes; strip them.
    if (dir.charAt(0) === '"' && dir.slice(-1) === '"') {
      dir = dir.slice(1, -1)
    }

    let candidate = path.join(dir, cmd)
    // Re-attach an explicit "./" or ".\" prefix that path.join() strips.
    if (!dir && /^\.[\\\/]/.test(cmd)) {
      candidate = cmd.slice(0, 2) + candidate
    }

    for (const ext of extensions) {
      const full = candidate + ext
      try {
        if (isexe.sync(full, { pathExt: rawPathExt })) {
          if (!opt.all) {
            return full
          }
          matches.push(full)
        }
      } catch (ex) {
        // Stat failures (missing file, permissions) just mean "no match";
        // keep searching.
      }
    }
  }

  if (opt.all && matches.length) {
    return matches
  }

  if (opt.nothrow) {
    return null
  }

  throw getNotFoundError(cmd)
}
| |
| |
| /***/ }), |
| |
| /***/ 816: |
| /***/ (function(module) { |
| |
"use strict";

// Matches a shebang line, e.g. "#!/usr/bin/env node", at the start of input.
module.exports = /^#!.*/;
| |
| |
| /***/ }), |
| |
| /***/ 818: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = isexe |
| isexe.sync = sync |
| |
| var fs = __webpack_require__(747) |
| |
// Decide whether `path` counts as executable on Windows by comparing its
// suffix against the PATHEXT extension list (options.pathExt wins over the
// PATHEXT environment variable). Returns true when no list is configured,
// when the list contains an empty entry, or when the path ends with one of
// the listed extensions (case-insensitively).
function checkPathExt (path, options) {
  var pathext = options.pathExt !== undefined ?
    options.pathExt : process.env.PATHEXT

  // No extension list at all: everything is considered executable.
  if (!pathext) {
    return true
  }

  pathext = pathext.split(';')
  // An empty entry (e.g. a trailing ";") means "any extension is fine".
  if (pathext.indexOf('') !== -1) {
    return true
  }
  for (var i = 0; i < pathext.length; i++) {
    var p = pathext[i].toLowerCase()
    // Case-insensitive suffix match. String#slice replaces the deprecated
    // Annex-B String#substr; slice(-n) is identical for n > 0 (and p is
    // guaranteed non-empty here by the `p &&` guard).
    if (p && path.slice(-p.length).toLowerCase() === p) {
      return true
    }
  }
  return false
}
| |
// A path can only be executable if it is a regular file or a symlink;
// anything else (directory, socket, ...) is rejected outright, and the
// remaining candidates are judged by their PATHEXT extension.
function checkStat (stat, path, options) {
  var fileLike = stat.isSymbolicLink() || stat.isFile()
  return fileLike ? checkPathExt(path, options) : false
}
| |
// Async executability check: stat the path and hand the verdict to cb.
// Stat errors (e.g. ENOENT) are reported as (er, false) rather than thrown.
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    if (er) {
      return cb(er, false)
    }
    cb(er, checkStat(stat, path, options))
  })
}
| |
// Synchronous executability check. Unlike the async variant, statSync
// throws on missing paths instead of reporting (er, false).
function sync (path, options) {
  var stat = fs.statSync(path)
  return checkStat(stat, path, options)
}
| |
| |
| /***/ }), |
| |
| /***/ 819: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| |
| var loader = __webpack_require__(457); |
| var dumper = __webpack_require__(685); |
| |
| |
// Build a stub for a removed JS-YAML 1.x API: any call to the returned
// function throws immediately, naming the deprecated entry point.
function deprecated(name) {
  var message = 'Function ' + name + ' is deprecated and cannot be used.';
  return function () {
    throw new Error(message);
  };
}
| |
| |
// Public js-yaml API surface: type system, schemas, loader and dumper.
module.exports.Type = __webpack_require__(945);
module.exports.Schema = __webpack_require__(43);
module.exports.FAILSAFE_SCHEMA = __webpack_require__(581);
module.exports.JSON_SCHEMA = __webpack_require__(23);
module.exports.CORE_SCHEMA = __webpack_require__(611);
module.exports.DEFAULT_SAFE_SCHEMA = __webpack_require__(723);
module.exports.DEFAULT_FULL_SCHEMA = __webpack_require__(910);
module.exports.load = loader.load;
module.exports.loadAll = loader.loadAll;
module.exports.safeLoad = loader.safeLoad;
module.exports.safeLoadAll = loader.safeLoadAll;
module.exports.dump = dumper.dump;
module.exports.safeDump = dumper.safeDump;
module.exports.YAMLException = __webpack_require__(556);

// Deprecated schema names from JS-YAML 2.0.x
// (aliases for the FAILSAFE / DEFAULT_SAFE / DEFAULT_FULL schemas above)
module.exports.MINIMAL_SCHEMA = __webpack_require__(581);
module.exports.SAFE_SCHEMA = __webpack_require__(723);
module.exports.DEFAULT_SCHEMA = __webpack_require__(910);

// Deprecated functions from JS-YAML 1.x.x
// (each is a stub that throws when invoked — see deprecated() above)
module.exports.scan = deprecated('scan');
module.exports.parse = deprecated('parse');
module.exports.compose = deprecated('compose');
module.exports.addConstructor = deprecated('addConstructor');
| |
| |
| /***/ }), |
| |
| /***/ 827: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
"use strict";


// Facade module: re-export the actual implementation (bundled module 366).
module.exports = __webpack_require__(366);
| |
| |
| /***/ }), |
| |
| /***/ 835: |
| /***/ (function(module) { |
| |
// Node.js built-in "url" module, left external to the bundle.
module.exports = require("url");
| |
| /***/ }), |
| |
| /***/ 842: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
// Mark the export object as an ES module for bundler/TS interop.
Object.defineProperty(exports, '__esModule', { value: true });

var deprecation = __webpack_require__(692);
| |
| var endpointsByScope = { |
| actions: { |
| cancelWorkflowRun: { |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| run_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs/:run_id/cancel" |
| }, |
| createOrUpdateSecretForRepo: { |
| method: "PUT", |
| params: { |
| encrypted_value: { |
| type: "string" |
| }, |
| key_id: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/secrets/:name" |
| }, |
| createRegistrationToken: { |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runners/registration-token" |
| }, |
| createRemoveToken: { |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runners/remove-token" |
| }, |
| deleteArtifact: { |
| method: "DELETE", |
| params: { |
| artifact_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" |
| }, |
| deleteSecretFromRepo: { |
| method: "DELETE", |
| params: { |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/secrets/:name" |
| }, |
| downloadArtifact: { |
| method: "GET", |
| params: { |
| archive_format: { |
| required: true, |
| type: "string" |
| }, |
| artifact_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format" |
| }, |
| getArtifact: { |
| method: "GET", |
| params: { |
| artifact_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" |
| }, |
| getPublicKey: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/secrets/public-key" |
| }, |
| getSecret: { |
| method: "GET", |
| params: { |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/secrets/:name" |
| }, |
| getSelfHostedRunner: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| runner_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runners/:runner_id" |
| }, |
| getWorkflow: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| workflow_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/workflows/:workflow_id" |
| }, |
| getWorkflowJob: { |
| method: "GET", |
| params: { |
| job_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/jobs/:job_id" |
| }, |
| getWorkflowRun: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| run_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs/:run_id" |
| }, |
| listDownloadsForSelfHostedRunnerApplication: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runners/downloads" |
| }, |
| listJobsForWorkflowRun: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| run_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs/:run_id/jobs" |
| }, |
| listRepoWorkflowRuns: { |
| method: "GET", |
| params: { |
| actor: { |
| type: "string" |
| }, |
| branch: { |
| type: "string" |
| }, |
| event: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| status: { |
| enum: ["completed", "status", "conclusion"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs" |
| }, |
| listRepoWorkflows: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/workflows" |
| }, |
| listSecretsForRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/secrets" |
| }, |
| listSelfHostedRunnersForRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runners" |
| }, |
| listWorkflowJobLogs: { |
| method: "GET", |
| params: { |
| job_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/jobs/:job_id/logs" |
| }, |
| listWorkflowRunArtifacts: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| run_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts" |
| }, |
| listWorkflowRunLogs: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| run_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs/:run_id/logs" |
| }, |
| listWorkflowRuns: { |
| method: "GET", |
| params: { |
| actor: { |
| type: "string" |
| }, |
| branch: { |
| type: "string" |
| }, |
| event: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| status: { |
| enum: ["completed", "status", "conclusion"], |
| type: "string" |
| }, |
| workflow_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs" |
| }, |
| reRunWorkflow: { |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| run_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runs/:run_id/rerun" |
| }, |
| removeSelfHostedRunner: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| runner_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/actions/runners/:runner_id" |
| } |
| }, |
| activity: { |
| checkStarringRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/starred/:owner/:repo" |
| }, |
| deleteRepoSubscription: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/subscription" |
| }, |
| deleteThreadSubscription: { |
| method: "DELETE", |
| params: { |
| thread_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/notifications/threads/:thread_id/subscription" |
| }, |
| getRepoSubscription: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/subscription" |
| }, |
| getThread: { |
| method: "GET", |
| params: { |
| thread_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/notifications/threads/:thread_id" |
| }, |
| getThreadSubscription: { |
| method: "GET", |
| params: { |
| thread_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/notifications/threads/:thread_id/subscription" |
| }, |
| listEventsForOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/events/orgs/:org" |
| }, |
| listEventsForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/events" |
| }, |
| listFeeds: { |
| method: "GET", |
| params: {}, |
| url: "/feeds" |
| }, |
| listNotifications: { |
| method: "GET", |
| params: { |
| all: { |
| type: "boolean" |
| }, |
| before: { |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| participating: { |
| type: "boolean" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/notifications" |
| }, |
| listNotificationsForRepo: { |
| method: "GET", |
| params: { |
| all: { |
| type: "boolean" |
| }, |
| before: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| participating: { |
| type: "boolean" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/notifications" |
| }, |
| listPublicEvents: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/events" |
| }, |
| listPublicEventsForOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/events" |
| }, |
| listPublicEventsForRepoNetwork: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/networks/:owner/:repo/events" |
| }, |
| listPublicEventsForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/events/public" |
| }, |
| listReceivedEventsForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/received_events" |
| }, |
| listReceivedPublicEventsForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/received_events/public" |
| }, |
| listRepoEvents: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/events" |
| }, |
| listReposStarredByAuthenticatedUser: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/user/starred" |
| }, |
| listReposStarredByUser: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/starred" |
| }, |
| listReposWatchedByUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/subscriptions" |
| }, |
| listStargazersForRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/stargazers" |
| }, |
| listWatchedReposForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/subscriptions" |
| }, |
| listWatchersForRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/subscribers" |
| }, |
| markAsRead: { |
| method: "PUT", |
| params: { |
| last_read_at: { |
| type: "string" |
| } |
| }, |
| url: "/notifications" |
| }, |
| markNotificationsAsReadForRepo: { |
| method: "PUT", |
| params: { |
| last_read_at: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/notifications" |
| }, |
| markThreadAsRead: { |
| method: "PATCH", |
| params: { |
| thread_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/notifications/threads/:thread_id" |
| }, |
| setRepoSubscription: { |
| method: "PUT", |
| params: { |
| ignored: { |
| type: "boolean" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| subscribed: { |
| type: "boolean" |
| } |
| }, |
| url: "/repos/:owner/:repo/subscription" |
| }, |
| setThreadSubscription: { |
| method: "PUT", |
| params: { |
| ignored: { |
| type: "boolean" |
| }, |
| thread_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/notifications/threads/:thread_id/subscription" |
| }, |
| starRepo: { |
| method: "PUT", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/starred/:owner/:repo" |
| }, |
| unstarRepo: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/starred/:owner/:repo" |
| } |
| }, |
| apps: { |
| addRepoToInstallation: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| installation_id: { |
| required: true, |
| type: "integer" |
| }, |
| repository_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/installations/:installation_id/repositories/:repository_id" |
| }, |
| checkAccountIsAssociatedWithAny: { |
| method: "GET", |
| params: { |
| account_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/marketplace_listing/accounts/:account_id" |
| }, |
| checkAccountIsAssociatedWithAnyStubbed: { |
| method: "GET", |
| params: { |
| account_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/marketplace_listing/stubbed/accounts/:account_id" |
| }, |
| checkAuthorization: { |
| deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization", |
| method: "GET", |
| params: { |
| access_token: { |
| required: true, |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/tokens/:access_token" |
| }, |
| checkToken: { |
| headers: { |
| accept: "application/vnd.github.doctor-strange-preview+json" |
| }, |
| method: "POST", |
| params: { |
| access_token: { |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/token" |
| }, |
| createContentAttachment: { |
| headers: { |
| accept: "application/vnd.github.corsair-preview+json" |
| }, |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| content_reference_id: { |
| required: true, |
| type: "integer" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/content_references/:content_reference_id/attachments" |
| }, |
| createFromManifest: { |
| headers: { |
| accept: "application/vnd.github.fury-preview+json" |
| }, |
| method: "POST", |
| params: { |
| code: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/app-manifests/:code/conversions" |
| }, |
| createInstallationToken: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "POST", |
| params: { |
| installation_id: { |
| required: true, |
| type: "integer" |
| }, |
| permissions: { |
| type: "object" |
| }, |
| repository_ids: { |
| type: "integer[]" |
| } |
| }, |
| url: "/app/installations/:installation_id/access_tokens" |
| }, |
| deleteAuthorization: { |
| headers: { |
| accept: "application/vnd.github.doctor-strange-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| access_token: { |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/grant" |
| }, |
| deleteInstallation: { |
| headers: { |
| accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| installation_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/app/installations/:installation_id" |
| }, |
| deleteToken: { |
| headers: { |
| accept: "application/vnd.github.doctor-strange-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| access_token: { |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/token" |
| }, |
| findOrgInstallation: { |
| deprecated: "octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)", |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/installation" |
| }, |
| findRepoInstallation: { |
| deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)", |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/installation" |
| }, |
| findUserInstallation: { |
| deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)", |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/installation" |
| }, |
| getAuthenticated: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: {}, |
| url: "/app" |
| }, |
| getBySlug: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| app_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/apps/:app_slug" |
| }, |
| getInstallation: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| installation_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/app/installations/:installation_id" |
| }, |
| getOrgInstallation: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/installation" |
| }, |
| getRepoInstallation: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/installation" |
| }, |
| getUserInstallation: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/installation" |
| }, |
| listAccountsUserOrOrgOnPlan: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| plan_id: { |
| required: true, |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/marketplace_listing/plans/:plan_id/accounts" |
| }, |
| listAccountsUserOrOrgOnPlanStubbed: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| plan_id: { |
| required: true, |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/marketplace_listing/stubbed/plans/:plan_id/accounts" |
| }, |
| listInstallationReposForAuthenticatedUser: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| installation_id: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/installations/:installation_id/repositories" |
| }, |
| listInstallations: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/app/installations" |
| }, |
| listInstallationsForAuthenticatedUser: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/installations" |
| }, |
| listMarketplacePurchasesForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/marketplace_purchases" |
| }, |
| listMarketplacePurchasesForAuthenticatedUserStubbed: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/marketplace_purchases/stubbed" |
| }, |
| listPlans: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/marketplace_listing/plans" |
| }, |
| listPlansStubbed: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/marketplace_listing/stubbed/plans" |
| }, |
| listRepos: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/installation/repositories" |
| }, |
| removeRepoFromInstallation: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| installation_id: { |
| required: true, |
| type: "integer" |
| }, |
| repository_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/installations/:installation_id/repositories/:repository_id" |
| }, |
| resetAuthorization: { |
| deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization", |
| method: "POST", |
| params: { |
| access_token: { |
| required: true, |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/tokens/:access_token" |
| }, |
| resetToken: { |
| headers: { |
| accept: "application/vnd.github.doctor-strange-preview+json" |
| }, |
| method: "PATCH", |
| params: { |
| access_token: { |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/token" |
| }, |
| revokeAuthorizationForApplication: { |
| deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application", |
| method: "DELETE", |
| params: { |
| access_token: { |
| required: true, |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/tokens/:access_token" |
| }, |
| revokeGrantForApplication: { |
| deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application", |
| method: "DELETE", |
| params: { |
| access_token: { |
| required: true, |
| type: "string" |
| }, |
| client_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/applications/:client_id/grants/:access_token" |
| }, |
| revokeInstallationToken: { |
| headers: { |
| accept: "application/vnd.github.gambit-preview+json" |
| }, |
| method: "DELETE", |
| params: {}, |
| url: "/installation/token" |
| } |
| }, |
| checks: { |
| create: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "POST", |
| params: { |
| actions: { |
| type: "object[]" |
| }, |
| "actions[].description": { |
| required: true, |
| type: "string" |
| }, |
| "actions[].identifier": { |
| required: true, |
| type: "string" |
| }, |
| "actions[].label": { |
| required: true, |
| type: "string" |
| }, |
| completed_at: { |
| type: "string" |
| }, |
| conclusion: { |
| enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], |
| type: "string" |
| }, |
| details_url: { |
| type: "string" |
| }, |
| external_id: { |
| type: "string" |
| }, |
| head_sha: { |
| required: true, |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| output: { |
| type: "object" |
| }, |
| "output.annotations": { |
| type: "object[]" |
| }, |
| "output.annotations[].annotation_level": { |
| enum: ["notice", "warning", "failure"], |
| required: true, |
| type: "string" |
| }, |
| "output.annotations[].end_column": { |
| type: "integer" |
| }, |
| "output.annotations[].end_line": { |
| required: true, |
| type: "integer" |
| }, |
| "output.annotations[].message": { |
| required: true, |
| type: "string" |
| }, |
| "output.annotations[].path": { |
| required: true, |
| type: "string" |
| }, |
| "output.annotations[].raw_details": { |
| type: "string" |
| }, |
| "output.annotations[].start_column": { |
| type: "integer" |
| }, |
| "output.annotations[].start_line": { |
| required: true, |
| type: "integer" |
| }, |
| "output.annotations[].title": { |
| type: "string" |
| }, |
| "output.images": { |
| type: "object[]" |
| }, |
| "output.images[].alt": { |
| required: true, |
| type: "string" |
| }, |
| "output.images[].caption": { |
| type: "string" |
| }, |
| "output.images[].image_url": { |
| required: true, |
| type: "string" |
| }, |
| "output.summary": { |
| required: true, |
| type: "string" |
| }, |
| "output.text": { |
| type: "string" |
| }, |
| "output.title": { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| started_at: { |
| type: "string" |
| }, |
| status: { |
| enum: ["queued", "in_progress", "completed"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-runs" |
| }, |
| createSuite: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "POST", |
| params: { |
| head_sha: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-suites" |
| }, |
| get: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "GET", |
| params: { |
| check_run_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-runs/:check_run_id" |
| }, |
| getSuite: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "GET", |
| params: { |
| check_suite_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-suites/:check_suite_id" |
| }, |
| listAnnotations: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "GET", |
| params: { |
| check_run_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations" |
| }, |
| listForRef: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "GET", |
| params: { |
| check_name: { |
| type: "string" |
| }, |
| filter: { |
| enum: ["latest", "all"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| status: { |
| enum: ["queued", "in_progress", "completed"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:ref/check-runs" |
| }, |
| listForSuite: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "GET", |
| params: { |
| check_name: { |
| type: "string" |
| }, |
| check_suite_id: { |
| required: true, |
| type: "integer" |
| }, |
| filter: { |
| enum: ["latest", "all"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| status: { |
| enum: ["queued", "in_progress", "completed"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs" |
| }, |
| listSuitesForRef: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "GET", |
| params: { |
| app_id: { |
| type: "integer" |
| }, |
| check_name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:ref/check-suites" |
| }, |
| rerequestSuite: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "POST", |
| params: { |
| check_suite_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest" |
| }, |
| setSuitesPreferences: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "PATCH", |
| params: { |
| auto_trigger_checks: { |
| type: "object[]" |
| }, |
| "auto_trigger_checks[].app_id": { |
| required: true, |
| type: "integer" |
| }, |
| "auto_trigger_checks[].setting": { |
| required: true, |
| type: "boolean" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-suites/preferences" |
| }, |
| update: { |
| headers: { |
| accept: "application/vnd.github.antiope-preview+json" |
| }, |
| method: "PATCH", |
| params: { |
| actions: { |
| type: "object[]" |
| }, |
| "actions[].description": { |
| required: true, |
| type: "string" |
| }, |
| "actions[].identifier": { |
| required: true, |
| type: "string" |
| }, |
| "actions[].label": { |
| required: true, |
| type: "string" |
| }, |
| check_run_id: { |
| required: true, |
| type: "integer" |
| }, |
| completed_at: { |
| type: "string" |
| }, |
| conclusion: { |
| enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], |
| type: "string" |
| }, |
| details_url: { |
| type: "string" |
| }, |
| external_id: { |
| type: "string" |
| }, |
| name: { |
| type: "string" |
| }, |
| output: { |
| type: "object" |
| }, |
| "output.annotations": { |
| type: "object[]" |
| }, |
| "output.annotations[].annotation_level": { |
| enum: ["notice", "warning", "failure"], |
| required: true, |
| type: "string" |
| }, |
| "output.annotations[].end_column": { |
| type: "integer" |
| }, |
| "output.annotations[].end_line": { |
| required: true, |
| type: "integer" |
| }, |
| "output.annotations[].message": { |
| required: true, |
| type: "string" |
| }, |
| "output.annotations[].path": { |
| required: true, |
| type: "string" |
| }, |
| "output.annotations[].raw_details": { |
| type: "string" |
| }, |
| "output.annotations[].start_column": { |
| type: "integer" |
| }, |
| "output.annotations[].start_line": { |
| required: true, |
| type: "integer" |
| }, |
| "output.annotations[].title": { |
| type: "string" |
| }, |
| "output.images": { |
| type: "object[]" |
| }, |
| "output.images[].alt": { |
| required: true, |
| type: "string" |
| }, |
| "output.images[].caption": { |
| type: "string" |
| }, |
| "output.images[].image_url": { |
| required: true, |
| type: "string" |
| }, |
| "output.summary": { |
| required: true, |
| type: "string" |
| }, |
| "output.text": { |
| type: "string" |
| }, |
| "output.title": { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| started_at: { |
| type: "string" |
| }, |
| status: { |
| enum: ["queued", "in_progress", "completed"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/check-runs/:check_run_id" |
| } |
| }, |
| codesOfConduct: { |
| getConductCode: { |
| headers: { |
| accept: "application/vnd.github.scarlet-witch-preview+json" |
| }, |
| method: "GET", |
| params: { |
| key: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/codes_of_conduct/:key" |
| }, |
| getForRepo: { |
| headers: { |
| accept: "application/vnd.github.scarlet-witch-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/community/code_of_conduct" |
| }, |
| listConductCodes: { |
| headers: { |
| accept: "application/vnd.github.scarlet-witch-preview+json" |
| }, |
| method: "GET", |
| params: {}, |
| url: "/codes_of_conduct" |
| } |
| }, |
| emojis: { |
| get: { |
| method: "GET", |
| params: {}, |
| url: "/emojis" |
| } |
| }, |
| gists: { |
| checkIsStarred: { |
| method: "GET", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/star" |
| }, |
| create: { |
| method: "POST", |
| params: { |
| description: { |
| type: "string" |
| }, |
| files: { |
| required: true, |
| type: "object" |
| }, |
| "files.content": { |
| type: "string" |
| }, |
| public: { |
| type: "boolean" |
| } |
| }, |
| url: "/gists" |
| }, |
| createComment: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/comments" |
| }, |
| delete: { |
| method: "DELETE", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id" |
| }, |
| deleteComment: { |
| method: "DELETE", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/comments/:comment_id" |
| }, |
| fork: { |
| method: "POST", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/forks" |
| }, |
| get: { |
| method: "GET", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id" |
| }, |
| getComment: { |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/comments/:comment_id" |
| }, |
| getRevision: { |
| method: "GET", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/:sha" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/gists" |
| }, |
| listComments: { |
| method: "GET", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/gists/:gist_id/comments" |
| }, |
| listCommits: { |
| method: "GET", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/gists/:gist_id/commits" |
| }, |
| listForks: { |
| method: "GET", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/gists/:gist_id/forks" |
| }, |
| listPublic: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/gists/public" |
| }, |
| listPublicForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/gists" |
| }, |
| listStarred: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/gists/starred" |
| }, |
| star: { |
| method: "PUT", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/star" |
| }, |
| unstar: { |
| method: "DELETE", |
| params: { |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/star" |
| }, |
| update: { |
| method: "PATCH", |
| params: { |
| description: { |
| type: "string" |
| }, |
| files: { |
| type: "object" |
| }, |
| "files.content": { |
| type: "string" |
| }, |
| "files.filename": { |
| type: "string" |
| }, |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id" |
| }, |
| updateComment: { |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| gist_id: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gists/:gist_id/comments/:comment_id" |
| } |
| }, |
| git: { |
| createBlob: { |
| method: "POST", |
| params: { |
| content: { |
| required: true, |
| type: "string" |
| }, |
| encoding: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/blobs" |
| }, |
| createCommit: { |
| method: "POST", |
| params: { |
| author: { |
| type: "object" |
| }, |
| "author.date": { |
| type: "string" |
| }, |
| "author.email": { |
| type: "string" |
| }, |
| "author.name": { |
| type: "string" |
| }, |
| committer: { |
| type: "object" |
| }, |
| "committer.date": { |
| type: "string" |
| }, |
| "committer.email": { |
| type: "string" |
| }, |
| "committer.name": { |
| type: "string" |
| }, |
| message: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| parents: { |
| required: true, |
| type: "string[]" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| signature: { |
| type: "string" |
| }, |
| tree: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/commits" |
| }, |
| createRef: { |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/refs" |
| }, |
| createTag: { |
| method: "POST", |
| params: { |
| message: { |
| required: true, |
| type: "string" |
| }, |
| object: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tag: { |
| required: true, |
| type: "string" |
| }, |
| tagger: { |
| type: "object" |
| }, |
| "tagger.date": { |
| type: "string" |
| }, |
| "tagger.email": { |
| type: "string" |
| }, |
| "tagger.name": { |
| type: "string" |
| }, |
| type: { |
| enum: ["commit", "tree", "blob"], |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/tags" |
| }, |
| createTree: { |
| method: "POST", |
| params: { |
| base_tree: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tree: { |
| required: true, |
| type: "object[]" |
| }, |
| "tree[].content": { |
| type: "string" |
| }, |
| "tree[].mode": { |
| enum: ["100644", "100755", "040000", "160000", "120000"], |
| type: "string" |
| }, |
| "tree[].path": { |
| type: "string" |
| }, |
| "tree[].sha": { |
| allowNull: true, |
| type: "string" |
| }, |
| "tree[].type": { |
| enum: ["blob", "tree", "commit"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/trees" |
| }, |
| deleteRef: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/refs/:ref" |
| }, |
| getBlob: { |
| method: "GET", |
| params: { |
| file_sha: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/blobs/:file_sha" |
| }, |
| getCommit: { |
| method: "GET", |
| params: { |
| commit_sha: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/commits/:commit_sha" |
| }, |
| getRef: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/ref/:ref" |
| }, |
| getTag: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tag_sha: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/tags/:tag_sha" |
| }, |
| getTree: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| recursive: { |
| enum: ["1"], |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tree_sha: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/trees/:tree_sha" |
| }, |
| listMatchingRefs: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/matching-refs/:ref" |
| }, |
| listRefs: { |
| method: "GET", |
| params: { |
| namespace: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/refs/:namespace" |
| }, |
| updateRef: { |
| method: "PATCH", |
| params: { |
| force: { |
| type: "boolean" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/git/refs/:ref" |
| } |
| }, |
| gitignore: { |
| getTemplate: { |
| method: "GET", |
| params: { |
| name: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/gitignore/templates/:name" |
| }, |
| listTemplates: { |
| method: "GET", |
| params: {}, |
| url: "/gitignore/templates" |
| } |
| }, |
| interactions: { |
| addOrUpdateRestrictionsForOrg: { |
| headers: { |
| accept: "application/vnd.github.sombra-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| limit: { |
| enum: ["existing_users", "contributors_only", "collaborators_only"], |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/interaction-limits" |
| }, |
| addOrUpdateRestrictionsForRepo: { |
| headers: { |
| accept: "application/vnd.github.sombra-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| limit: { |
| enum: ["existing_users", "contributors_only", "collaborators_only"], |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/interaction-limits" |
| }, |
| getRestrictionsForOrg: { |
| headers: { |
| accept: "application/vnd.github.sombra-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/interaction-limits" |
| }, |
| getRestrictionsForRepo: { |
| headers: { |
| accept: "application/vnd.github.sombra-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/interaction-limits" |
| }, |
| removeRestrictionsForOrg: { |
| headers: { |
| accept: "application/vnd.github.sombra-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/interaction-limits" |
| }, |
| removeRestrictionsForRepo: { |
| headers: { |
| accept: "application/vnd.github.sombra-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/interaction-limits" |
| } |
| }, |
| issues: { |
| addAssignees: { |
| method: "POST", |
| params: { |
| assignees: { |
| type: "string[]" |
| }, |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/assignees" |
| }, |
| addLabels: { |
| method: "POST", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| labels: { |
| required: true, |
| type: "string[]" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/labels" |
| }, |
| checkAssignee: { |
| method: "GET", |
| params: { |
| assignee: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/assignees/:assignee" |
| }, |
| create: { |
| method: "POST", |
| params: { |
| assignee: { |
| type: "string" |
| }, |
| assignees: { |
| type: "string[]" |
| }, |
| body: { |
| type: "string" |
| }, |
| labels: { |
| type: "string[]" |
| }, |
| milestone: { |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues" |
| }, |
| createComment: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/comments" |
| }, |
| createLabel: { |
| method: "POST", |
| params: { |
| color: { |
| required: true, |
| type: "string" |
| }, |
| description: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/labels" |
| }, |
| createMilestone: { |
| method: "POST", |
| params: { |
| description: { |
| type: "string" |
| }, |
| due_on: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed"], |
| type: "string" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/milestones" |
| }, |
| deleteComment: { |
| method: "DELETE", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/comments/:comment_id" |
| }, |
| deleteLabel: { |
| method: "DELETE", |
| params: { |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/labels/:name" |
| }, |
| deleteMilestone: { |
| method: "DELETE", |
| params: { |
| milestone_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "milestone_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/milestones/:milestone_number" |
| }, |
| get: { |
| method: "GET", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number" |
| }, |
| getComment: { |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/comments/:comment_id" |
| }, |
| getEvent: { |
| method: "GET", |
| params: { |
| event_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/events/:event_id" |
| }, |
| getLabel: { |
| method: "GET", |
| params: { |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/labels/:name" |
| }, |
| getMilestone: { |
| method: "GET", |
| params: { |
| milestone_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "milestone_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/milestones/:milestone_number" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| filter: { |
| enum: ["assigned", "created", "mentioned", "subscribed", "all"], |
| type: "string" |
| }, |
| labels: { |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated", "comments"], |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/issues" |
| }, |
| listAssignees: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/assignees" |
| }, |
| listComments: { |
| method: "GET", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/comments" |
| }, |
| listCommentsForRepo: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/comments" |
| }, |
| listEvents: { |
| method: "GET", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/events" |
| }, |
| listEventsForRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/events" |
| }, |
| listEventsForTimeline: { |
| headers: { |
| accept: "application/vnd.github.mockingbird-preview+json" |
| }, |
| method: "GET", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/timeline" |
| }, |
| listForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| filter: { |
| enum: ["assigned", "created", "mentioned", "subscribed", "all"], |
| type: "string" |
| }, |
| labels: { |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated", "comments"], |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/user/issues" |
| }, |
| listForOrg: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| filter: { |
| enum: ["assigned", "created", "mentioned", "subscribed", "all"], |
| type: "string" |
| }, |
| labels: { |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated", "comments"], |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/issues" |
| }, |
| listForRepo: { |
| method: "GET", |
| params: { |
| assignee: { |
| type: "string" |
| }, |
| creator: { |
| type: "string" |
| }, |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| labels: { |
| type: "string" |
| }, |
| mentioned: { |
| type: "string" |
| }, |
| milestone: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated", "comments"], |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues" |
| }, |
| listLabelsForMilestone: { |
| method: "GET", |
| params: { |
| milestone_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "milestone_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/milestones/:milestone_number/labels" |
| }, |
| listLabelsForRepo: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/labels" |
| }, |
| listLabelsOnIssue: { |
| method: "GET", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/labels" |
| }, |
| listMilestonesForRepo: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["due_on", "completeness"], |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/milestones" |
| }, |
| lock: { |
| method: "PUT", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| lock_reason: { |
| enum: ["off-topic", "too heated", "resolved", "spam"], |
| type: "string" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/lock" |
| }, |
| removeAssignees: { |
| method: "DELETE", |
| params: { |
| assignees: { |
| type: "string[]" |
| }, |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/assignees" |
| }, |
| removeLabel: { |
| method: "DELETE", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/labels/:name" |
| }, |
| removeLabels: { |
| method: "DELETE", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/labels" |
| }, |
| replaceLabels: { |
| method: "PUT", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| labels: { |
| type: "string[]" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/labels" |
| }, |
| unlock: { |
| method: "DELETE", |
| params: { |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/lock" |
| }, |
| update: { |
| method: "PATCH", |
| params: { |
| assignee: { |
| type: "string" |
| }, |
| assignees: { |
| type: "string[]" |
| }, |
| body: { |
| type: "string" |
| }, |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| labels: { |
| type: "string[]" |
| }, |
| milestone: { |
| allowNull: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed"], |
| type: "string" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number" |
| }, |
| updateComment: { |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/comments/:comment_id" |
| }, |
| updateLabel: { |
| method: "PATCH", |
| params: { |
| color: { |
| type: "string" |
| }, |
| current_name: { |
| required: true, |
| type: "string" |
| }, |
| description: { |
| type: "string" |
| }, |
| name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/labels/:current_name" |
| }, |
| updateMilestone: { |
| method: "PATCH", |
| params: { |
| description: { |
| type: "string" |
| }, |
| due_on: { |
| type: "string" |
| }, |
| milestone_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "milestone_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed"], |
| type: "string" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/milestones/:milestone_number" |
| } |
| }, |
// Route definitions for the GitHub "licenses" API namespace (generated data:
// each entry maps an Octokit method name to its HTTP method, declared
// parameters — required/type/enum metadata — and a ":placeholder" URL template).
licenses: {
get: {
method: "GET",
params: {
license: {
required: true,
type: "string"
}
},
url: "/licenses/:license"
},
getForRepo: {
method: "GET",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
}
},
url: "/repos/:owner/:repo/license"
},
// Kept for backwards compatibility; the `deprecated` string is logged when called.
list: {
deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)",
method: "GET",
params: {},
url: "/licenses"
},
listCommonlyUsed: {
method: "GET",
params: {},
url: "/licenses"
}
},
// Route definitions for the GitHub "markdown" API namespace (generated data).
markdown: {
render: {
method: "POST",
params: {
context: {
type: "string"
},
mode: {
enum: ["markdown", "gfm"],
type: "string"
},
text: {
required: true,
type: "string"
}
},
url: "/markdown"
},
// Raw variant: the request body is sent as text/plain instead of JSON
// (note the explicit content-type header and the `mapTo: "data"` parameter).
renderRaw: {
headers: {
"content-type": "text/plain; charset=utf-8"
},
method: "POST",
params: {
data: {
mapTo: "data",
required: true,
type: "string"
}
},
url: "/markdown/raw"
}
},
// Route definition for the GitHub "meta" API namespace (GET /meta).
meta: {
get: {
method: "GET",
params: {},
url: "/meta"
}
},
// Route definitions for the GitHub "migrations" API namespace (generated data).
// Covers both org/user migrations (archive download/unlock, wyandotte preview
// media type) and the source-import endpoints under /repos/:owner/:repo/import.
migrations: {
cancelImport: {
method: "DELETE",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
}
},
url: "/repos/:owner/:repo/import"
},
// Migration-archive endpoints require the wyandotte preview accept header.
deleteArchiveForAuthenticatedUser: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "DELETE",
params: {
migration_id: {
required: true,
type: "integer"
}
},
url: "/user/migrations/:migration_id/archive"
},
deleteArchiveForOrg: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "DELETE",
params: {
migration_id: {
required: true,
type: "integer"
},
org: {
required: true,
type: "string"
}
},
url: "/orgs/:org/migrations/:migration_id/archive"
},
downloadArchiveForOrg: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
},
org: {
required: true,
type: "string"
}
},
url: "/orgs/:org/migrations/:migration_id/archive"
},
getArchiveForAuthenticatedUser: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
}
},
url: "/user/migrations/:migration_id/archive"
},
// Deprecated alias of downloadArchiveForOrg; same route, logs a notice when used.
getArchiveForOrg: {
deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)",
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
},
org: {
required: true,
type: "string"
}
},
url: "/orgs/:org/migrations/:migration_id/archive"
},
getCommitAuthors: {
method: "GET",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
},
since: {
type: "string"
}
},
url: "/repos/:owner/:repo/import/authors"
},
getImportProgress: {
method: "GET",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
}
},
url: "/repos/:owner/:repo/import"
},
getLargeFiles: {
method: "GET",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
}
},
url: "/repos/:owner/:repo/import/large_files"
},
getStatusForAuthenticatedUser: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
}
},
url: "/user/migrations/:migration_id"
},
getStatusForOrg: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
},
org: {
required: true,
type: "string"
}
},
url: "/orgs/:org/migrations/:migration_id"
},
listForAuthenticatedUser: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
page: {
type: "integer"
},
per_page: {
type: "integer"
}
},
url: "/user/migrations"
},
listForOrg: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
org: {
required: true,
type: "string"
},
page: {
type: "integer"
},
per_page: {
type: "integer"
}
},
url: "/orgs/:org/migrations"
},
listReposForOrg: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
},
org: {
required: true,
type: "string"
},
page: {
type: "integer"
},
per_page: {
type: "integer"
}
},
url: "/orgs/:org/migrations/:migration_id/repositories"
},
// NOTE(review): GitHub's documented endpoint is
// GET /user/migrations/:migration_id/repositories; the "/user/:migration_id/..."
// template below looks like an upstream generator typo — verify against the
// vendored @octokit/rest version before changing (this file is generated output).
listReposForUser: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "GET",
params: {
migration_id: {
required: true,
type: "integer"
},
page: {
type: "integer"
},
per_page: {
type: "integer"
}
},
url: "/user/:migration_id/repositories"
},
mapCommitAuthor: {
method: "PATCH",
params: {
author_id: {
required: true,
type: "integer"
},
email: {
type: "string"
},
name: {
type: "string"
},
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
}
},
url: "/repos/:owner/:repo/import/authors/:author_id"
},
setLfsPreference: {
method: "PATCH",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
},
use_lfs: {
enum: ["opt_in", "opt_out"],
required: true,
type: "string"
}
},
url: "/repos/:owner/:repo/import/lfs"
},
startForAuthenticatedUser: {
method: "POST",
params: {
exclude_attachments: {
type: "boolean"
},
lock_repositories: {
type: "boolean"
},
repositories: {
required: true,
type: "string[]"
}
},
url: "/user/migrations"
},
startForOrg: {
method: "POST",
params: {
exclude_attachments: {
type: "boolean"
},
lock_repositories: {
type: "boolean"
},
org: {
required: true,
type: "string"
},
repositories: {
required: true,
type: "string[]"
}
},
url: "/orgs/:org/migrations"
},
startImport: {
method: "PUT",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
},
tfvc_project: {
type: "string"
},
vcs: {
enum: ["subversion", "git", "mercurial", "tfvc"],
type: "string"
},
vcs_password: {
type: "string"
},
vcs_url: {
required: true,
type: "string"
},
vcs_username: {
type: "string"
}
},
url: "/repos/:owner/:repo/import"
},
unlockRepoForAuthenticatedUser: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "DELETE",
params: {
migration_id: {
required: true,
type: "integer"
},
repo_name: {
required: true,
type: "string"
}
},
url: "/user/migrations/:migration_id/repos/:repo_name/lock"
},
unlockRepoForOrg: {
headers: {
accept: "application/vnd.github.wyandotte-preview+json"
},
method: "DELETE",
params: {
migration_id: {
required: true,
type: "integer"
},
org: {
required: true,
type: "string"
},
repo_name: {
required: true,
type: "string"
}
},
url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock"
},
updateImport: {
method: "PATCH",
params: {
owner: {
required: true,
type: "string"
},
repo: {
required: true,
type: "string"
},
vcs_password: {
type: "string"
},
vcs_username: {
type: "string"
}
},
url: "/repos/:owner/:repo/import"
}
},
// Route definitions for the GitHub "oauthAuthorizations" API namespace
// (generated data). Every entry here carries a `deprecated` string — GitHub
// sunset the OAuth Authorizations API; these remain only so existing callers
// keep working while a deprecation notice is logged.
oauthAuthorizations: {
checkAuthorization: {
deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)",
method: "GET",
params: {
access_token: {
required: true,
type: "string"
},
client_id: {
required: true,
type: "string"
}
},
url: "/applications/:client_id/tokens/:access_token"
},
createAuthorization: {
deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization",
method: "POST",
params: {
client_id: {
type: "string"
},
client_secret: {
type: "string"
},
fingerprint: {
type: "string"
},
note: {
required: true,
type: "string"
},
note_url: {
type: "string"
},
scopes: {
type: "string[]"
}
},
url: "/authorizations"
},
deleteAuthorization: {
deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization",
method: "DELETE",
params: {
authorization_id: {
required: true,
type: "integer"
}
},
url: "/authorizations/:authorization_id"
},
deleteGrant: {
deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant",
method: "DELETE",
params: {
grant_id: {
required: true,
type: "integer"
}
},
url: "/applications/grants/:grant_id"
},
getAuthorization: {
deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization",
method: "GET",
params: {
authorization_id: {
required: true,
type: "integer"
}
},
url: "/authorizations/:authorization_id"
},
getGrant: {
deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant",
method: "GET",
params: {
grant_id: {
required: true,
type: "integer"
}
},
url: "/applications/grants/:grant_id"
},
getOrCreateAuthorizationForApp: {
deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app",
method: "PUT",
params: {
client_id: {
required: true,
type: "string"
},
client_secret: {
required: true,
type: "string"
},
fingerprint: {
type: "string"
},
note: {
type: "string"
},
note_url: {
type: "string"
},
scopes: {
type: "string[]"
}
},
url: "/authorizations/clients/:client_id"
},
getOrCreateAuthorizationForAppAndFingerprint: {
deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint",
method: "PUT",
params: {
client_id: {
required: true,
type: "string"
},
client_secret: {
required: true,
type: "string"
},
fingerprint: {
required: true,
type: "string"
},
note: {
type: "string"
},
note_url: {
type: "string"
},
scopes: {
type: "string[]"
}
},
url: "/authorizations/clients/:client_id/:fingerprint"
},
// Older alias of the entry above (renamed 2018-12-27); identical route and params.
getOrCreateAuthorizationForAppFingerprint: {
deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)",
method: "PUT",
params: {
client_id: {
required: true,
type: "string"
},
client_secret: {
required: true,
type: "string"
},
fingerprint: {
required: true,
type: "string"
},
note: {
type: "string"
},
note_url: {
type: "string"
},
scopes: {
type: "string[]"
}
},
url: "/authorizations/clients/:client_id/:fingerprint"
},
listAuthorizations: {
deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations",
method: "GET",
params: {
page: {
type: "integer"
},
per_page: {
type: "integer"
}
},
url: "/authorizations"
},
listGrants: {
deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants",
method: "GET",
params: {
page: {
type: "integer"
},
per_page: {
type: "integer"
}
},
url: "/applications/grants"
},
resetAuthorization: {
deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)",
method: "POST",
params: {
access_token: {
required: true,
type: "string"
},
client_id: {
required: true,
type: "string"
}
},
url: "/applications/:client_id/tokens/:access_token"
},
revokeAuthorizationForApplication: {
deprecated: "octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)",
method: "DELETE",
params: {
access_token: {
required: true,
type: "string"
},
client_id: {
required: true,
type: "string"
}
},
url: "/applications/:client_id/tokens/:access_token"
},
revokeGrantForApplication: {
deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)",
method: "DELETE",
params: {
access_token: {
required: true,
type: "string"
},
client_id: {
required: true,
type: "string"
}
},
url: "/applications/:client_id/grants/:access_token"
},
updateAuthorization: {
deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization",
method: "PATCH",
params: {
add_scopes: {
type: "string[]"
},
authorization_id: {
required: true,
type: "integer"
},
fingerprint: {
type: "string"
},
note: {
type: "string"
},
note_url: {
type: "string"
},
remove_scopes: {
type: "string[]"
},
scopes: {
type: "string[]"
}
},
url: "/authorizations/:authorization_id"
}
},
| orgs: { |
| addOrUpdateMembership: { |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| role: { |
| enum: ["admin", "member"], |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/memberships/:username" |
| }, |
| blockUser: { |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/blocks/:username" |
| }, |
| checkBlockedUser: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/blocks/:username" |
| }, |
| checkMembership: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/members/:username" |
| }, |
| checkPublicMembership: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/public_members/:username" |
| }, |
| concealMembership: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/public_members/:username" |
| }, |
| convertMemberToOutsideCollaborator: { |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/outside_collaborators/:username" |
| }, |
| createHook: { |
| method: "POST", |
| params: { |
| active: { |
| type: "boolean" |
| }, |
| config: { |
| required: true, |
| type: "object" |
| }, |
| "config.content_type": { |
| type: "string" |
| }, |
| "config.insecure_ssl": { |
| type: "string" |
| }, |
| "config.secret": { |
| type: "string" |
| }, |
| "config.url": { |
| required: true, |
| type: "string" |
| }, |
| events: { |
| type: "string[]" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/hooks" |
| }, |
| createInvitation: { |
| method: "POST", |
| params: { |
| email: { |
| type: "string" |
| }, |
| invitee_id: { |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| role: { |
| enum: ["admin", "direct_member", "billing_manager"], |
| type: "string" |
| }, |
| team_ids: { |
| type: "integer[]" |
| } |
| }, |
| url: "/orgs/:org/invitations" |
| }, |
| deleteHook: { |
| method: "DELETE", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/hooks/:hook_id" |
| }, |
| get: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org" |
| }, |
| getHook: { |
| method: "GET", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/hooks/:hook_id" |
| }, |
| getMembership: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/memberships/:username" |
| }, |
| getMembershipForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/memberships/orgs/:org" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "integer" |
| } |
| }, |
| url: "/organizations" |
| }, |
| listBlockedUsers: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/blocks" |
| }, |
| listForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/orgs" |
| }, |
| listForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/orgs" |
| }, |
| listHooks: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/hooks" |
| }, |
| listInstallations: { |
| headers: { |
| accept: "application/vnd.github.machine-man-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/installations" |
| }, |
| listInvitationTeams: { |
| method: "GET", |
| params: { |
| invitation_id: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/invitations/:invitation_id/teams" |
| }, |
| listMembers: { |
| method: "GET", |
| params: { |
| filter: { |
| enum: ["2fa_disabled", "all"], |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| role: { |
| enum: ["all", "admin", "member"], |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/members" |
| }, |
| listMemberships: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| state: { |
| enum: ["active", "pending"], |
| type: "string" |
| } |
| }, |
| url: "/user/memberships/orgs" |
| }, |
| listOutsideCollaborators: { |
| method: "GET", |
| params: { |
| filter: { |
| enum: ["2fa_disabled", "all"], |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/outside_collaborators" |
| }, |
| listPendingInvitations: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/invitations" |
| }, |
| listPublicMembers: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/public_members" |
| }, |
| pingHook: { |
| method: "POST", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/hooks/:hook_id/pings" |
| }, |
| publicizeMembership: { |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/public_members/:username" |
| }, |
| removeMember: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/members/:username" |
| }, |
| removeMembership: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/memberships/:username" |
| }, |
| removeOutsideCollaborator: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/outside_collaborators/:username" |
| }, |
| unblockUser: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/blocks/:username" |
| }, |
| update: { |
| method: "PATCH", |
| params: { |
| billing_email: { |
| type: "string" |
| }, |
| company: { |
| type: "string" |
| }, |
| default_repository_permission: { |
| enum: ["read", "write", "admin", "none"], |
| type: "string" |
| }, |
| description: { |
| type: "string" |
| }, |
| email: { |
| type: "string" |
| }, |
| has_organization_projects: { |
| type: "boolean" |
| }, |
| has_repository_projects: { |
| type: "boolean" |
| }, |
| location: { |
| type: "string" |
| }, |
| members_allowed_repository_creation_type: { |
| enum: ["all", "private", "none"], |
| type: "string" |
| }, |
| members_can_create_internal_repositories: { |
| type: "boolean" |
| }, |
| members_can_create_private_repositories: { |
| type: "boolean" |
| }, |
| members_can_create_public_repositories: { |
| type: "boolean" |
| }, |
| members_can_create_repositories: { |
| type: "boolean" |
| }, |
| name: { |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org" |
| }, |
| updateHook: { |
| method: "PATCH", |
| params: { |
| active: { |
| type: "boolean" |
| }, |
| config: { |
| type: "object" |
| }, |
| "config.content_type": { |
| type: "string" |
| }, |
| "config.insecure_ssl": { |
| type: "string" |
| }, |
| "config.secret": { |
| type: "string" |
| }, |
| "config.url": { |
| required: true, |
| type: "string" |
| }, |
| events: { |
| type: "string[]" |
| }, |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/hooks/:hook_id" |
| }, |
| updateMembership: { |
| method: "PATCH", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["active"], |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/memberships/orgs/:org" |
| } |
| }, |
| projects: { |
| addCollaborator: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| permission: { |
| enum: ["read", "write", "admin"], |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/projects/:project_id/collaborators/:username" |
| }, |
| createCard: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| column_id: { |
| required: true, |
| type: "integer" |
| }, |
| content_id: { |
| type: "integer" |
| }, |
| content_type: { |
| type: "string" |
| }, |
| note: { |
| type: "string" |
| } |
| }, |
| url: "/projects/columns/:column_id/cards" |
| }, |
| createColumn: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| name: { |
| required: true, |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/:project_id/columns" |
| }, |
| createForAuthenticatedUser: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| body: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/projects" |
| }, |
| createForOrg: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| body: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/projects" |
| }, |
| createForRepo: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| body: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/projects" |
| }, |
| delete: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/:project_id" |
| }, |
| deleteCard: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| card_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/columns/cards/:card_id" |
| }, |
| deleteColumn: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| column_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/columns/:column_id" |
| }, |
| get: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/:project_id" |
| }, |
| getCard: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| card_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/columns/cards/:card_id" |
| }, |
| getColumn: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| column_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/columns/:column_id" |
| }, |
| listCards: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| archived_state: { |
| enum: ["all", "archived", "not_archived"], |
| type: "string" |
| }, |
| column_id: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/projects/columns/:column_id/cards" |
| }, |
| listCollaborators: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| affiliation: { |
| enum: ["outside", "direct", "all"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/:project_id/collaborators" |
| }, |
| listColumns: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/projects/:project_id/columns" |
| }, |
| listForOrg: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/projects" |
| }, |
| listForRepo: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/projects" |
| }, |
| listForUser: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/projects" |
| }, |
| moveCard: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| card_id: { |
| required: true, |
| type: "integer" |
| }, |
| column_id: { |
| type: "integer" |
| }, |
| position: { |
| required: true, |
| type: "string", |
| validation: "^(top|bottom|after:\\d+)$" |
| } |
| }, |
| url: "/projects/columns/cards/:card_id/moves" |
| }, |
| moveColumn: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "POST", |
| params: { |
| column_id: { |
| required: true, |
| type: "integer" |
| }, |
| position: { |
| required: true, |
| type: "string", |
| validation: "^(first|last|after:\\d+)$" |
| } |
| }, |
| url: "/projects/columns/:column_id/moves" |
| }, |
| removeCollaborator: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/projects/:project_id/collaborators/:username" |
| }, |
| reviewUserPermissionLevel: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/projects/:project_id/collaborators/:username/permission" |
| }, |
| update: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PATCH", |
| params: { |
| body: { |
| type: "string" |
| }, |
| name: { |
| type: "string" |
| }, |
| organization_permission: { |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| state: { |
| enum: ["open", "closed"], |
| type: "string" |
| } |
| }, |
| url: "/projects/:project_id" |
| }, |
| updateCard: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PATCH", |
| params: { |
| archived: { |
| type: "boolean" |
| }, |
| card_id: { |
| required: true, |
| type: "integer" |
| }, |
| note: { |
| type: "string" |
| } |
| }, |
| url: "/projects/columns/cards/:card_id" |
| }, |
| updateColumn: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PATCH", |
| params: { |
| column_id: { |
| required: true, |
| type: "integer" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/projects/columns/:column_id" |
| } |
| }, |
| pulls: { |
| checkIfMerged: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/merge" |
| }, |
| create: { |
| method: "POST", |
| params: { |
| base: { |
| required: true, |
| type: "string" |
| }, |
| body: { |
| type: "string" |
| }, |
| draft: { |
| type: "boolean" |
| }, |
| head: { |
| required: true, |
| type: "string" |
| }, |
| maintainer_can_modify: { |
| type: "boolean" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls" |
| }, |
| createComment: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| commit_id: { |
| required: true, |
| type: "string" |
| }, |
| in_reply_to: { |
| deprecated: true, |
| description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.", |
| type: "integer" |
| }, |
| line: { |
| type: "integer" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| position: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| side: { |
| enum: ["LEFT", "RIGHT"], |
| type: "string" |
| }, |
| start_line: { |
| type: "integer" |
| }, |
| start_side: { |
| enum: ["LEFT", "RIGHT", "side"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/comments" |
| }, |
| createCommentReply: { |
| deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)", |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| commit_id: { |
| required: true, |
| type: "string" |
| }, |
| in_reply_to: { |
| deprecated: true, |
| description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.", |
| type: "integer" |
| }, |
| line: { |
| type: "integer" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| position: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| side: { |
| enum: ["LEFT", "RIGHT"], |
| type: "string" |
| }, |
| start_line: { |
| type: "integer" |
| }, |
| start_side: { |
| enum: ["LEFT", "RIGHT", "side"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/comments" |
| }, |
| createFromIssue: { |
| deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request", |
| method: "POST", |
| params: { |
| base: { |
| required: true, |
| type: "string" |
| }, |
| draft: { |
| type: "boolean" |
| }, |
| head: { |
| required: true, |
| type: "string" |
| }, |
| issue: { |
| required: true, |
| type: "integer" |
| }, |
| maintainer_can_modify: { |
| type: "boolean" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls" |
| }, |
| createReview: { |
| method: "POST", |
| params: { |
| body: { |
| type: "string" |
| }, |
| comments: { |
| type: "object[]" |
| }, |
| "comments[].body": { |
| required: true, |
| type: "string" |
| }, |
| "comments[].path": { |
| required: true, |
| type: "string" |
| }, |
| "comments[].position": { |
| required: true, |
| type: "integer" |
| }, |
| commit_id: { |
| type: "string" |
| }, |
| event: { |
| enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], |
| type: "string" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews" |
| }, |
| createReviewCommentReply: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies" |
| }, |
| createReviewRequest: { |
| method: "POST", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| reviewers: { |
| type: "string[]" |
| }, |
| team_reviewers: { |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" |
| }, |
| deleteComment: { |
| method: "DELETE", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/comments/:comment_id" |
| }, |
| deletePendingReview: { |
| method: "DELETE", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| review_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" |
| }, |
| deleteReviewRequest: { |
| method: "DELETE", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| reviewers: { |
| type: "string[]" |
| }, |
| team_reviewers: { |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" |
| }, |
| dismissReview: { |
| method: "PUT", |
| params: { |
| message: { |
| required: true, |
| type: "string" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| review_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals" |
| }, |
| get: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number" |
| }, |
| getComment: { |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/comments/:comment_id" |
| }, |
| getCommentsForReview: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| review_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments" |
| }, |
| getReview: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| review_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| base: { |
| type: "string" |
| }, |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| head: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated", "popularity", "long-running"], |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed", "all"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls" |
| }, |
| listComments: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/comments" |
| }, |
| listCommentsForRepo: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/comments" |
| }, |
| listCommits: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/commits" |
| }, |
| listFiles: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/files" |
| }, |
| listReviewRequests: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" |
| }, |
| listReviews: { |
| method: "GET", |
| params: { |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews" |
| }, |
| merge: { |
| method: "PUT", |
| params: { |
| commit_message: { |
| type: "string" |
| }, |
| commit_title: { |
| type: "string" |
| }, |
| merge_method: { |
| enum: ["merge", "squash", "rebase"], |
| type: "string" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/merge" |
| }, |
| submitReview: { |
| method: "POST", |
| params: { |
| body: { |
| type: "string" |
| }, |
| event: { |
| enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], |
| required: true, |
| type: "string" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| review_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events" |
| }, |
| update: { |
| method: "PATCH", |
| params: { |
| base: { |
| type: "string" |
| }, |
| body: { |
| type: "string" |
| }, |
| maintainer_can_modify: { |
| type: "boolean" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["open", "closed"], |
| type: "string" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number" |
| }, |
| updateBranch: { |
| headers: { |
| accept: "application/vnd.github.lydian-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| expected_head_sha: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/update-branch" |
| }, |
| updateComment: { |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/comments/:comment_id" |
| }, |
| updateReview: { |
| method: "PUT", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| number: { |
| alias: "pull_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| pull_number: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| review_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" |
| } |
| }, |
| rateLimit: { |
| get: { |
| method: "GET", |
| params: {}, |
| url: "/rate_limit" |
| } |
| }, |
| reactions: { |
| createForCommitComment: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/comments/:comment_id/reactions" |
| }, |
| createForIssue: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/reactions" |
| }, |
| createForIssueComment: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" |
| }, |
| createForPullRequestReviewComment: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" |
| }, |
| createForTeamDiscussion: { |
| deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/reactions" |
| }, |
| createForTeamDiscussionComment: { |
| deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" |
| }, |
| createForTeamDiscussionCommentInOrg: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" |
| }, |
| createForTeamDiscussionCommentLegacy: { |
| deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" |
| }, |
| createForTeamDiscussionInOrg: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" |
| }, |
| createForTeamDiscussionLegacy: { |
| deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "POST", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/reactions" |
| }, |
| delete: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| reaction_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/reactions/:reaction_id" |
| }, |
| listForCommitComment: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/comments/:comment_id/reactions" |
| }, |
| listForIssue: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| issue_number: { |
| required: true, |
| type: "integer" |
| }, |
| number: { |
| alias: "issue_number", |
| deprecated: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/:issue_number/reactions" |
| }, |
| listForIssueComment: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" |
| }, |
| listForPullRequestReviewComment: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" |
| }, |
| listForTeamDiscussion: { |
| deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/reactions" |
| }, |
| listForTeamDiscussionComment: { |
| deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" |
| }, |
| listForTeamDiscussionCommentInOrg: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" |
| }, |
| listForTeamDiscussionCommentLegacy: { |
| deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" |
| }, |
| listForTeamDiscussionInOrg: { |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" |
| }, |
| listForTeamDiscussionLegacy: { |
| deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy", |
| headers: { |
| accept: "application/vnd.github.squirrel-girl-preview+json" |
| }, |
| method: "GET", |
| params: { |
| content: { |
| enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/reactions" |
| } |
| }, |
| repos: { |
| acceptInvitation: { |
| method: "PATCH", |
| params: { |
| invitation_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/repository_invitations/:invitation_id" |
| }, |
| addCollaborator: { |
| method: "PUT", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/collaborators/:username" |
| }, |
| addDeployKey: { |
| method: "POST", |
| params: { |
| key: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| read_only: { |
| type: "boolean" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/keys" |
| }, |
| addProtectedBranchAdminEnforcement: { |
| method: "POST", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" |
| }, |
| addProtectedBranchAppRestrictions: { |
| method: "POST", |
| params: { |
| apps: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| }, |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" |
| }, |
| addProtectedBranchRequiredSignatures: { |
| headers: { |
| accept: "application/vnd.github.zzzax-preview+json" |
| }, |
| method: "POST", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" |
| }, |
| addProtectedBranchRequiredStatusChecksContexts: { |
| method: "POST", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| contexts: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" |
| }, |
| addProtectedBranchTeamRestrictions: { |
| method: "POST", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| teams: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" |
| }, |
| addProtectedBranchUserRestrictions: { |
| method: "POST", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| users: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" |
| }, |
| checkCollaborator: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/collaborators/:username" |
| }, |
| checkVulnerabilityAlerts: { |
| headers: { |
| accept: "application/vnd.github.dorian-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/vulnerability-alerts" |
| }, |
| compareCommits: { |
| method: "GET", |
| params: { |
| base: { |
| required: true, |
| type: "string" |
| }, |
| head: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/compare/:base...:head" |
| }, |
| createCommitComment: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| commit_sha: { |
| required: true, |
| type: "string" |
| }, |
| line: { |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| type: "string" |
| }, |
| position: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| alias: "commit_sha", |
| deprecated: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:commit_sha/comments" |
| }, |
| createDeployment: { |
| method: "POST", |
| params: { |
| auto_merge: { |
| type: "boolean" |
| }, |
| description: { |
| type: "string" |
| }, |
| environment: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| payload: { |
| type: "string" |
| }, |
| production_environment: { |
| type: "boolean" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| required_contexts: { |
| type: "string[]" |
| }, |
| task: { |
| type: "string" |
| }, |
| transient_environment: { |
| type: "boolean" |
| } |
| }, |
| url: "/repos/:owner/:repo/deployments" |
| }, |
| createDeploymentStatus: { |
| method: "POST", |
| params: { |
| auto_inactive: { |
| type: "boolean" |
| }, |
| deployment_id: { |
| required: true, |
| type: "integer" |
| }, |
| description: { |
| type: "string" |
| }, |
| environment: { |
| enum: ["production", "staging", "qa"], |
| type: "string" |
| }, |
| environment_url: { |
| type: "string" |
| }, |
| log_url: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"], |
| required: true, |
| type: "string" |
| }, |
| target_url: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" |
| }, |
| createDispatchEvent: { |
| method: "POST", |
| params: { |
| client_payload: { |
| type: "object" |
| }, |
| event_type: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/dispatches" |
| }, |
| createFile: { |
| deprecated: "octokit.repos.createFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", |
| method: "PUT", |
| params: { |
| author: { |
| type: "object" |
| }, |
| "author.email": { |
| required: true, |
| type: "string" |
| }, |
| "author.name": { |
| required: true, |
| type: "string" |
| }, |
| branch: { |
| type: "string" |
| }, |
| committer: { |
| type: "object" |
| }, |
| "committer.email": { |
| required: true, |
| type: "string" |
| }, |
| "committer.name": { |
| required: true, |
| type: "string" |
| }, |
| content: { |
| required: true, |
| type: "string" |
| }, |
| message: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/contents/:path" |
| }, |
| createForAuthenticatedUser: { |
| method: "POST", |
| params: { |
| allow_merge_commit: { |
| type: "boolean" |
| }, |
| allow_rebase_merge: { |
| type: "boolean" |
| }, |
| allow_squash_merge: { |
| type: "boolean" |
| }, |
| auto_init: { |
| type: "boolean" |
| }, |
| delete_branch_on_merge: { |
| type: "boolean" |
| }, |
| description: { |
| type: "string" |
| }, |
| gitignore_template: { |
| type: "string" |
| }, |
| has_issues: { |
| type: "boolean" |
| }, |
| has_projects: { |
| type: "boolean" |
| }, |
| has_wiki: { |
| type: "boolean" |
| }, |
| homepage: { |
| type: "string" |
| }, |
| is_template: { |
| type: "boolean" |
| }, |
| license_template: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| team_id: { |
| type: "integer" |
| }, |
| visibility: { |
| enum: ["public", "private", "visibility", "internal"], |
| type: "string" |
| } |
| }, |
| url: "/user/repos" |
| }, |
| createFork: { |
| method: "POST", |
| params: { |
| organization: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/forks" |
| }, |
| createHook: { |
| method: "POST", |
| params: { |
| active: { |
| type: "boolean" |
| }, |
| config: { |
| required: true, |
| type: "object" |
| }, |
| "config.content_type": { |
| type: "string" |
| }, |
| "config.insecure_ssl": { |
| type: "string" |
| }, |
| "config.secret": { |
| type: "string" |
| }, |
| "config.url": { |
| required: true, |
| type: "string" |
| }, |
| events: { |
| type: "string[]" |
| }, |
| name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks" |
| }, |
| createInOrg: { |
| method: "POST", |
| params: { |
| allow_merge_commit: { |
| type: "boolean" |
| }, |
| allow_rebase_merge: { |
| type: "boolean" |
| }, |
| allow_squash_merge: { |
| type: "boolean" |
| }, |
| auto_init: { |
| type: "boolean" |
| }, |
| delete_branch_on_merge: { |
| type: "boolean" |
| }, |
| description: { |
| type: "string" |
| }, |
| gitignore_template: { |
| type: "string" |
| }, |
| has_issues: { |
| type: "boolean" |
| }, |
| has_projects: { |
| type: "boolean" |
| }, |
| has_wiki: { |
| type: "boolean" |
| }, |
| homepage: { |
| type: "string" |
| }, |
| is_template: { |
| type: "boolean" |
| }, |
| license_template: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| team_id: { |
| type: "integer" |
| }, |
| visibility: { |
| enum: ["public", "private", "visibility", "internal"], |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/repos" |
| }, |
| createOrUpdateFile: { |
| method: "PUT", |
| params: { |
| author: { |
| type: "object" |
| }, |
| "author.email": { |
| required: true, |
| type: "string" |
| }, |
| "author.name": { |
| required: true, |
| type: "string" |
| }, |
| branch: { |
| type: "string" |
| }, |
| committer: { |
| type: "object" |
| }, |
| "committer.email": { |
| required: true, |
| type: "string" |
| }, |
| "committer.name": { |
| required: true, |
| type: "string" |
| }, |
| content: { |
| required: true, |
| type: "string" |
| }, |
| message: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/contents/:path" |
| }, |
| createRelease: { |
| method: "POST", |
| params: { |
| body: { |
| type: "string" |
| }, |
| draft: { |
| type: "boolean" |
| }, |
| name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| prerelease: { |
| type: "boolean" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tag_name: { |
| required: true, |
| type: "string" |
| }, |
| target_commitish: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases" |
| }, |
| createStatus: { |
| method: "POST", |
| params: { |
| context: { |
| type: "string" |
| }, |
| description: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| required: true, |
| type: "string" |
| }, |
| state: { |
| enum: ["error", "failure", "pending", "success"], |
| required: true, |
| type: "string" |
| }, |
| target_url: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/statuses/:sha" |
| }, |
| createUsingTemplate: { |
| headers: { |
| accept: "application/vnd.github.baptiste-preview+json" |
| }, |
| method: "POST", |
| params: { |
| description: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| template_owner: { |
| required: true, |
| type: "string" |
| }, |
| template_repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:template_owner/:template_repo/generate" |
| }, |
| declineInvitation: { |
| method: "DELETE", |
| params: { |
| invitation_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/repository_invitations/:invitation_id" |
| }, |
| delete: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo" |
| }, |
| deleteCommitComment: { |
| method: "DELETE", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/comments/:comment_id" |
| }, |
| deleteDownload: { |
| method: "DELETE", |
| params: { |
| download_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/downloads/:download_id" |
| }, |
| deleteFile: { |
| method: "DELETE", |
| params: { |
| author: { |
| type: "object" |
| }, |
| "author.email": { |
| type: "string" |
| }, |
| "author.name": { |
| type: "string" |
| }, |
| branch: { |
| type: "string" |
| }, |
| committer: { |
| type: "object" |
| }, |
| "committer.email": { |
| type: "string" |
| }, |
| "committer.name": { |
| type: "string" |
| }, |
| message: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/contents/:path" |
| }, |
| deleteHook: { |
| method: "DELETE", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks/:hook_id" |
| }, |
| deleteInvitation: { |
| method: "DELETE", |
| params: { |
| invitation_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/invitations/:invitation_id" |
| }, |
| deleteRelease: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| release_id: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/:release_id" |
| }, |
| deleteReleaseAsset: { |
| method: "DELETE", |
| params: { |
| asset_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/assets/:asset_id" |
| }, |
| disableAutomatedSecurityFixes: { |
| headers: { |
| accept: "application/vnd.github.london-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/automated-security-fixes" |
| }, |
| disablePagesSite: { |
| headers: { |
| accept: "application/vnd.github.switcheroo-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages" |
| }, |
| disableVulnerabilityAlerts: { |
| headers: { |
| accept: "application/vnd.github.dorian-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/vulnerability-alerts" |
| }, |
| enableAutomatedSecurityFixes: { |
| headers: { |
| accept: "application/vnd.github.london-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/automated-security-fixes" |
| }, |
| enablePagesSite: { |
| headers: { |
| accept: "application/vnd.github.switcheroo-preview+json" |
| }, |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| source: { |
| type: "object" |
| }, |
| "source.branch": { |
| enum: ["master", "gh-pages"], |
| type: "string" |
| }, |
| "source.path": { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages" |
| }, |
| enableVulnerabilityAlerts: { |
| headers: { |
| accept: "application/vnd.github.dorian-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/vulnerability-alerts" |
| }, |
| get: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo" |
| }, |
| getAppsWithAccessToProtectedBranch: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" |
| }, |
| getArchiveLink: { |
| method: "GET", |
| params: { |
| archive_format: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/:archive_format/:ref" |
| }, |
| getBranch: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch" |
| }, |
| getBranchProtection: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection" |
| }, |
| getClones: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| per: { |
| enum: ["day", "week"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/traffic/clones" |
| }, |
| getCodeFrequencyStats: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/stats/code_frequency" |
| }, |
| getCollaboratorPermissionLevel: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/collaborators/:username/permission" |
| }, |
| getCombinedStatusForRef: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:ref/status" |
| }, |
| getCommit: { |
| method: "GET", |
| params: { |
| commit_sha: { |
| alias: "ref", |
| deprecated: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| alias: "ref", |
| deprecated: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:ref" |
| }, |
| getCommitActivityStats: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/stats/commit_activity" |
| }, |
| getCommitComment: { |
| method: "GET", |
| params: { |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/comments/:comment_id" |
| }, |
| getCommitRefSha: { |
| deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit", |
| headers: { |
| accept: "application/vnd.github.v3.sha" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:ref" |
| }, |
| getContents: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/contents/:path" |
| }, |
| getContributorsStats: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/stats/contributors" |
| }, |
| getDeployKey: { |
| method: "GET", |
| params: { |
| key_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/keys/:key_id" |
| }, |
| getDeployment: { |
| method: "GET", |
| params: { |
| deployment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/deployments/:deployment_id" |
| }, |
| getDeploymentStatus: { |
| method: "GET", |
| params: { |
| deployment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| status_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id" |
| }, |
| getDownload: { |
| method: "GET", |
| params: { |
| download_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/downloads/:download_id" |
| }, |
| getHook: { |
| method: "GET", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks/:hook_id" |
| }, |
| getLatestPagesBuild: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages/builds/latest" |
| }, |
| getLatestRelease: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/latest" |
| }, |
| getPages: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages" |
| }, |
| getPagesBuild: { |
| method: "GET", |
| params: { |
| build_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages/builds/:build_id" |
| }, |
| getParticipationStats: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/stats/participation" |
| }, |
| getProtectedBranchAdminEnforcement: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" |
| }, |
| getProtectedBranchPullRequestReviewEnforcement: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" |
| }, |
| getProtectedBranchRequiredSignatures: { |
| headers: { |
| accept: "application/vnd.github.zzzax-preview+json" |
| }, |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" |
| }, |
| getProtectedBranchRequiredStatusChecks: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" |
| }, |
| getProtectedBranchRestrictions: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" |
| }, |
| getPunchCardStats: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/stats/punch_card" |
| }, |
| getReadme: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| ref: { |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/readme" |
| }, |
| getRelease: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| release_id: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/:release_id" |
| }, |
| getReleaseAsset: { |
| method: "GET", |
| params: { |
| asset_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/assets/:asset_id" |
| }, |
| getReleaseByTag: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tag: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/tags/:tag" |
| }, |
| getTeamsWithAccessToProtectedBranch: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" |
| }, |
| getTopPaths: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/traffic/popular/paths" |
| }, |
| getTopReferrers: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/traffic/popular/referrers" |
| }, |
| getUsersWithAccessToProtectedBranch: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" |
| }, |
| getViews: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| per: { |
| enum: ["day", "week"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/traffic/views" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| affiliation: { |
| type: "string" |
| }, |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated", "pushed", "full_name"], |
| type: "string" |
| }, |
| type: { |
| enum: ["all", "owner", "public", "private", "member"], |
| type: "string" |
| }, |
| visibility: { |
| enum: ["all", "public", "private"], |
| type: "string" |
| } |
| }, |
| url: "/user/repos" |
| }, |
| listAppsWithAccessToProtectedBranch: { |
| deprecated: "octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)", |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" |
| }, |
| listAssetsForRelease: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| release_id: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/:release_id/assets" |
| }, |
| listBranches: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| protected: { |
| type: "boolean" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches" |
| }, |
| listBranchesForHeadCommit: { |
| headers: { |
| accept: "application/vnd.github.groot-preview+json" |
| }, |
| method: "GET", |
| params: { |
| commit_sha: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head" |
| }, |
| listCollaborators: { |
| method: "GET", |
| params: { |
| affiliation: { |
| enum: ["outside", "direct", "all"], |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/collaborators" |
| }, |
| listCommentsForCommit: { |
| method: "GET", |
| params: { |
| commit_sha: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| ref: { |
| alias: "commit_sha", |
| deprecated: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:commit_sha/comments" |
| }, |
| listCommitComments: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/comments" |
| }, |
| listCommits: { |
| method: "GET", |
| params: { |
| author: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| path: { |
| type: "string" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| type: "string" |
| }, |
| since: { |
| type: "string" |
| }, |
| until: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits" |
| }, |
| listContributors: { |
| method: "GET", |
| params: { |
| anon: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/contributors" |
| }, |
| listDeployKeys: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/keys" |
| }, |
| listDeploymentStatuses: { |
| method: "GET", |
| params: { |
| deployment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" |
| }, |
| listDeployments: { |
| method: "GET", |
| params: { |
| environment: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| ref: { |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| type: "string" |
| }, |
| task: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/deployments" |
| }, |
| listDownloads: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/downloads" |
| }, |
| listForOrg: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated", "pushed", "full_name"], |
| type: "string" |
| }, |
| type: { |
| enum: ["all", "public", "private", "forks", "sources", "member", "internal"], |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/repos" |
| }, |
| listForUser: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated", "pushed", "full_name"], |
| type: "string" |
| }, |
| type: { |
| enum: ["all", "owner", "member"], |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/repos" |
| }, |
| listForks: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["newest", "oldest", "stargazers"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/forks" |
| }, |
| listHooks: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks" |
| }, |
| listInvitations: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/invitations" |
| }, |
| listInvitationsForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/repository_invitations" |
| }, |
| listLanguages: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/languages" |
| }, |
| listPagesBuilds: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages/builds" |
| }, |
| listProtectedBranchRequiredStatusChecksContexts: { |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" |
| }, |
| listProtectedBranchTeamRestrictions: { |
| deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)", |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" |
| }, |
| listProtectedBranchUserRestrictions: { |
| deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)", |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" |
| }, |
| listPublic: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "integer" |
| } |
| }, |
| url: "/repositories" |
| }, |
| listPullRequestsAssociatedWithCommit: { |
| headers: { |
| accept: "application/vnd.github.groot-preview+json" |
| }, |
| method: "GET", |
| params: { |
| commit_sha: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:commit_sha/pulls" |
| }, |
| listReleases: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases" |
| }, |
| listStatusesForRef: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| ref: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/commits/:ref/statuses" |
| }, |
| listTags: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/tags" |
| }, |
| listTeams: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/teams" |
| }, |
| listTeamsWithAccessToProtectedBranch: { |
| deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)", |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" |
| }, |
| listTopics: { |
| headers: { |
| accept: "application/vnd.github.mercy-preview+json" |
| }, |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/topics" |
| }, |
| listUsersWithAccessToProtectedBranch: { |
| deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)", |
| method: "GET", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" |
| }, |
| merge: { |
| method: "POST", |
| params: { |
| base: { |
| required: true, |
| type: "string" |
| }, |
| commit_message: { |
| type: "string" |
| }, |
| head: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/merges" |
| }, |
| pingHook: { |
| method: "POST", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks/:hook_id/pings" |
| }, |
| removeBranchProtection: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection" |
| }, |
| removeCollaborator: { |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/collaborators/:username" |
| }, |
| removeDeployKey: { |
| method: "DELETE", |
| params: { |
| key_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/keys/:key_id" |
| }, |
| removeProtectedBranchAdminEnforcement: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" |
| }, |
| removeProtectedBranchAppRestrictions: { |
| method: "DELETE", |
| params: { |
| apps: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| }, |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" |
| }, |
| removeProtectedBranchPullRequestReviewEnforcement: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" |
| }, |
| removeProtectedBranchRequiredSignatures: { |
| headers: { |
| accept: "application/vnd.github.zzzax-preview+json" |
| }, |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" |
| }, |
| removeProtectedBranchRequiredStatusChecks: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" |
| }, |
| removeProtectedBranchRequiredStatusChecksContexts: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| contexts: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" |
| }, |
| removeProtectedBranchRestrictions: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" |
| }, |
| removeProtectedBranchTeamRestrictions: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| teams: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" |
| }, |
| removeProtectedBranchUserRestrictions: { |
| method: "DELETE", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| users: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" |
| }, |
| replaceProtectedBranchAppRestrictions: { |
| method: "PUT", |
| params: { |
| apps: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| }, |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" |
| }, |
| replaceProtectedBranchRequiredStatusChecksContexts: { |
| method: "PUT", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| contexts: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" |
| }, |
| replaceProtectedBranchTeamRestrictions: { |
| method: "PUT", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| teams: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" |
| }, |
| replaceProtectedBranchUserRestrictions: { |
| method: "PUT", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| users: { |
| mapTo: "data", |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" |
| }, |
| replaceTopics: { |
| headers: { |
| accept: "application/vnd.github.mercy-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| names: { |
| required: true, |
| type: "string[]" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/topics" |
| }, |
| requestPageBuild: { |
| method: "POST", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages/builds" |
| }, |
| retrieveCommunityProfileMetrics: { |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/community/profile" |
| }, |
| testPushHook: { |
| method: "POST", |
| params: { |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks/:hook_id/tests" |
| }, |
| transfer: { |
| method: "POST", |
| params: { |
| new_owner: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_ids: { |
| type: "integer[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/transfer" |
| }, |
| update: { |
| method: "PATCH", |
| params: { |
| allow_merge_commit: { |
| type: "boolean" |
| }, |
| allow_rebase_merge: { |
| type: "boolean" |
| }, |
| allow_squash_merge: { |
| type: "boolean" |
| }, |
| archived: { |
| type: "boolean" |
| }, |
| default_branch: { |
| type: "string" |
| }, |
| delete_branch_on_merge: { |
| type: "boolean" |
| }, |
| description: { |
| type: "string" |
| }, |
| has_issues: { |
| type: "boolean" |
| }, |
| has_projects: { |
| type: "boolean" |
| }, |
| has_wiki: { |
| type: "boolean" |
| }, |
| homepage: { |
| type: "string" |
| }, |
| is_template: { |
| type: "boolean" |
| }, |
| name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| visibility: { |
| enum: ["public", "private", "visibility", "internal"], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo" |
| }, |
| updateBranchProtection: { |
| method: "PUT", |
| params: { |
| allow_deletions: { |
| type: "boolean" |
| }, |
| allow_force_pushes: { |
| allowNull: true, |
| type: "boolean" |
| }, |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| enforce_admins: { |
| allowNull: true, |
| required: true, |
| type: "boolean" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| required_linear_history: { |
| type: "boolean" |
| }, |
| required_pull_request_reviews: { |
| allowNull: true, |
| required: true, |
| type: "object" |
| }, |
| "required_pull_request_reviews.dismiss_stale_reviews": { |
| type: "boolean" |
| }, |
| "required_pull_request_reviews.dismissal_restrictions": { |
| type: "object" |
| }, |
| "required_pull_request_reviews.dismissal_restrictions.teams": { |
| type: "string[]" |
| }, |
| "required_pull_request_reviews.dismissal_restrictions.users": { |
| type: "string[]" |
| }, |
| "required_pull_request_reviews.require_code_owner_reviews": { |
| type: "boolean" |
| }, |
| "required_pull_request_reviews.required_approving_review_count": { |
| type: "integer" |
| }, |
| required_status_checks: { |
| allowNull: true, |
| required: true, |
| type: "object" |
| }, |
| "required_status_checks.contexts": { |
| required: true, |
| type: "string[]" |
| }, |
| "required_status_checks.strict": { |
| required: true, |
| type: "boolean" |
| }, |
| restrictions: { |
| allowNull: true, |
| required: true, |
| type: "object" |
| }, |
| "restrictions.apps": { |
| type: "string[]" |
| }, |
| "restrictions.teams": { |
| required: true, |
| type: "string[]" |
| }, |
| "restrictions.users": { |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection" |
| }, |
| updateCommitComment: { |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/comments/:comment_id" |
| }, |
| updateFile: { |
| deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", |
| method: "PUT", |
| params: { |
| author: { |
| type: "object" |
| }, |
| "author.email": { |
| required: true, |
| type: "string" |
| }, |
| "author.name": { |
| required: true, |
| type: "string" |
| }, |
| branch: { |
| type: "string" |
| }, |
| committer: { |
| type: "object" |
| }, |
| "committer.email": { |
| required: true, |
| type: "string" |
| }, |
| "committer.name": { |
| required: true, |
| type: "string" |
| }, |
| content: { |
| required: true, |
| type: "string" |
| }, |
| message: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| path: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| sha: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/contents/:path" |
| }, |
| updateHook: { |
| method: "PATCH", |
| params: { |
| active: { |
| type: "boolean" |
| }, |
| add_events: { |
| type: "string[]" |
| }, |
| config: { |
| type: "object" |
| }, |
| "config.content_type": { |
| type: "string" |
| }, |
| "config.insecure_ssl": { |
| type: "string" |
| }, |
| "config.secret": { |
| type: "string" |
| }, |
| "config.url": { |
| required: true, |
| type: "string" |
| }, |
| events: { |
| type: "string[]" |
| }, |
| hook_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| remove_events: { |
| type: "string[]" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/hooks/:hook_id" |
| }, |
| updateInformationAboutPagesSite: { |
| method: "PUT", |
| params: { |
| cname: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| source: { |
| enum: ['"gh-pages"', '"master"', '"master /docs"'], |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/pages" |
| }, |
| updateInvitation: { |
| method: "PATCH", |
| params: { |
| invitation_id: { |
| required: true, |
| type: "integer" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| permissions: { |
| enum: ["read", "write", "admin"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/invitations/:invitation_id" |
| }, |
| updateProtectedBranchPullRequestReviewEnforcement: { |
| method: "PATCH", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| dismiss_stale_reviews: { |
| type: "boolean" |
| }, |
| dismissal_restrictions: { |
| type: "object" |
| }, |
| "dismissal_restrictions.teams": { |
| type: "string[]" |
| }, |
| "dismissal_restrictions.users": { |
| type: "string[]" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| require_code_owner_reviews: { |
| type: "boolean" |
| }, |
| required_approving_review_count: { |
| type: "integer" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" |
| }, |
| updateProtectedBranchRequiredStatusChecks: { |
| method: "PATCH", |
| params: { |
| branch: { |
| required: true, |
| type: "string" |
| }, |
| contexts: { |
| type: "string[]" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| strict: { |
| type: "boolean" |
| } |
| }, |
| url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" |
| }, |
| updateRelease: { |
| method: "PATCH", |
| params: { |
| body: { |
| type: "string" |
| }, |
| draft: { |
| type: "boolean" |
| }, |
| name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| prerelease: { |
| type: "boolean" |
| }, |
| release_id: { |
| required: true, |
| type: "integer" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| tag_name: { |
| type: "string" |
| }, |
| target_commitish: { |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/:release_id" |
| }, |
| updateReleaseAsset: { |
| method: "PATCH", |
| params: { |
| asset_id: { |
| required: true, |
| type: "integer" |
| }, |
| label: { |
| type: "string" |
| }, |
| name: { |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/repos/:owner/:repo/releases/assets/:asset_id" |
| }, |
| uploadReleaseAsset: { |
| method: "POST", |
| params: { |
| data: { |
| mapTo: "data", |
| required: true, |
| type: "string | object" |
| }, |
| file: { |
| alias: "data", |
| deprecated: true, |
| type: "string | object" |
| }, |
| headers: { |
| required: true, |
| type: "object" |
| }, |
| "headers.content-length": { |
| required: true, |
| type: "integer" |
| }, |
| "headers.content-type": { |
| required: true, |
| type: "string" |
| }, |
| label: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| url: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: ":url" |
| } |
| }, |
| search: { |
| code: { |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["indexed"], |
| type: "string" |
| } |
| }, |
| url: "/search/code" |
| }, |
| commits: { |
| headers: { |
| accept: "application/vnd.github.cloak-preview+json" |
| }, |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["author-date", "committer-date"], |
| type: "string" |
| } |
| }, |
| url: "/search/commits" |
| }, |
| issues: { |
| deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)", |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/search/issues" |
| }, |
| issuesAndPullRequests: { |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/search/issues" |
| }, |
| labels: { |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| repository_id: { |
| required: true, |
| type: "integer" |
| }, |
| sort: { |
| enum: ["created", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/search/labels" |
| }, |
| repos: { |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["stars", "forks", "help-wanted-issues", "updated"], |
| type: "string" |
| } |
| }, |
| url: "/search/repositories" |
| }, |
| topics: { |
| method: "GET", |
| params: { |
| q: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/search/topics" |
| }, |
| users: { |
| method: "GET", |
| params: { |
| order: { |
| enum: ["desc", "asc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| q: { |
| required: true, |
| type: "string" |
| }, |
| sort: { |
| enum: ["followers", "repositories", "joined"], |
| type: "string" |
| } |
| }, |
| url: "/search/users" |
| } |
| }, |
| teams: { |
| addMember: { |
| deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)", |
| method: "PUT", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/members/:username" |
| }, |
| addMemberLegacy: { |
| deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy", |
| method: "PUT", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/members/:username" |
| }, |
| addOrUpdateMembership: { |
| deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)", |
| method: "PUT", |
| params: { |
| role: { |
| enum: ["member", "maintainer"], |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/memberships/:username" |
| }, |
| addOrUpdateMembershipInOrg: { |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| role: { |
| enum: ["member", "maintainer"], |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/memberships/:username" |
| }, |
| addOrUpdateMembershipLegacy: { |
| deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy", |
| method: "PUT", |
| params: { |
| role: { |
| enum: ["member", "maintainer"], |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/memberships/:username" |
| }, |
| addOrUpdateProject: { |
| deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| permission: { |
| enum: ["read", "write", "admin"], |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects/:project_id" |
| }, |
| addOrUpdateProjectInOrg: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| permission: { |
| enum: ["read", "write", "admin"], |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/projects/:project_id" |
| }, |
| addOrUpdateProjectLegacy: { |
| deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy", |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "PUT", |
| params: { |
| permission: { |
| enum: ["read", "write", "admin"], |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects/:project_id" |
| }, |
| addOrUpdateRepo: { |
| deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)", |
| method: "PUT", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos/:owner/:repo" |
| }, |
| addOrUpdateRepoInOrg: { |
| method: "PUT", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" |
| }, |
| addOrUpdateRepoLegacy: { |
| deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy", |
| method: "PUT", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos/:owner/:repo" |
| }, |
| checkManagesRepo: { |
| deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos/:owner/:repo" |
| }, |
| checkManagesRepoInOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" |
| }, |
| checkManagesRepoLegacy: { |
| deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy", |
| method: "GET", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos/:owner/:repo" |
| }, |
| create: { |
| method: "POST", |
| params: { |
| description: { |
| type: "string" |
| }, |
| maintainers: { |
| type: "string[]" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| parent_team_id: { |
| type: "integer" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| privacy: { |
| enum: ["secret", "closed"], |
| type: "string" |
| }, |
| repo_names: { |
| type: "string[]" |
| } |
| }, |
| url: "/orgs/:org/teams" |
| }, |
| createDiscussion: { |
| deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)", |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/discussions" |
| }, |
| createDiscussionComment: { |
| deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)", |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments" |
| }, |
| createDiscussionCommentInOrg: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" |
| }, |
| createDiscussionCommentLegacy: { |
| deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy", |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments" |
| }, |
| createDiscussionInOrg: { |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions" |
| }, |
| createDiscussionLegacy: { |
| deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy", |
| method: "POST", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| private: { |
| type: "boolean" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| title: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/discussions" |
| }, |
| delete: { |
| deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id" |
| }, |
| deleteDiscussion: { |
| deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number" |
| }, |
| deleteDiscussionComment: { |
| deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" |
| }, |
| deleteDiscussionCommentInOrg: { |
| method: "DELETE", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" |
| }, |
| deleteDiscussionCommentLegacy: { |
| deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy", |
| method: "DELETE", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" |
| }, |
| deleteDiscussionInOrg: { |
| method: "DELETE", |
| params: { |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" |
| }, |
| deleteDiscussionLegacy: { |
| deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy", |
| method: "DELETE", |
| params: { |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number" |
| }, |
| deleteInOrg: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug" |
| }, |
| deleteLegacy: { |
| deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy", |
| method: "DELETE", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id" |
| }, |
| get: { |
| deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id" |
| }, |
| getByName: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug" |
| }, |
| getDiscussion: { |
| deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number" |
| }, |
| getDiscussionComment: { |
| deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" |
| }, |
| getDiscussionCommentInOrg: { |
| method: "GET", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" |
| }, |
| getDiscussionCommentLegacy: { |
| deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy", |
| method: "GET", |
| params: { |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" |
| }, |
| getDiscussionInOrg: { |
| method: "GET", |
| params: { |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" |
| }, |
| getDiscussionLegacy: { |
| deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy", |
| method: "GET", |
| params: { |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number" |
| }, |
| getLegacy: { |
| deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy", |
| method: "GET", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id" |
| }, |
| getMember: { |
| deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/members/:username" |
| }, |
| getMemberLegacy: { |
| deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy", |
| method: "GET", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/members/:username" |
| }, |
| getMembership: { |
| deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/memberships/:username" |
| }, |
| getMembershipInOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/memberships/:username" |
| }, |
| getMembershipLegacy: { |
| deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy", |
| method: "GET", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/memberships/:username" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/orgs/:org/teams" |
| }, |
| listChild: { |
| deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/teams" |
| }, |
| listChildInOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/teams" |
| }, |
| listChildLegacy: { |
| deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/teams" |
| }, |
| listDiscussionComments: { |
| deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments" |
| }, |
| listDiscussionCommentsInOrg: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" |
| }, |
| listDiscussionCommentsLegacy: { |
| deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy", |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments" |
| }, |
| listDiscussions: { |
| deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions" |
| }, |
| listDiscussionsInOrg: { |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions" |
| }, |
| listDiscussionsLegacy: { |
| deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy", |
| method: "GET", |
| params: { |
| direction: { |
| enum: ["asc", "desc"], |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions" |
| }, |
| listForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/teams" |
| }, |
| listMembers: { |
| deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| role: { |
| enum: ["member", "maintainer", "all"], |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/members" |
| }, |
| listMembersInOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| role: { |
| enum: ["member", "maintainer", "all"], |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/members" |
| }, |
| listMembersLegacy: { |
| deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| role: { |
| enum: ["member", "maintainer", "all"], |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/members" |
| }, |
| listPendingInvitations: { |
| deprecated: "octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/invitations" |
| }, |
| listPendingInvitationsInOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/invitations" |
| }, |
| listPendingInvitationsLegacy: { |
| deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/invitations" |
| }, |
| listProjects: { |
| deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects" |
| }, |
| listProjectsInOrg: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/projects" |
| }, |
| listProjectsLegacy: { |
| deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy", |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects" |
| }, |
| listRepos: { |
| deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos" |
| }, |
| listReposInOrg: { |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/repos" |
| }, |
| listReposLegacy: { |
| deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy", |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos" |
| }, |
| removeMember: { |
| deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/members/:username" |
| }, |
| removeMemberLegacy: { |
| deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-member-legacy", |
| method: "DELETE", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/members/:username" |
| }, |
| removeMembership: { |
| deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/memberships/:username" |
| }, |
| removeMembershipInOrg: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/memberships/:username" |
| }, |
| removeMembershipLegacy: { |
| deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy", |
| method: "DELETE", |
| params: { |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/memberships/:username" |
| }, |
| removeProject: { |
| deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects/:project_id" |
| }, |
| removeProjectInOrg: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/projects/:project_id" |
| }, |
| removeProjectLegacy: { |
| deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy", |
| method: "DELETE", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects/:project_id" |
| }, |
| removeRepo: { |
| deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)", |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos/:owner/:repo" |
| }, |
| removeRepoInOrg: { |
| method: "DELETE", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" |
| }, |
| removeRepoLegacy: { |
| deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy", |
| method: "DELETE", |
| params: { |
| owner: { |
| required: true, |
| type: "string" |
| }, |
| repo: { |
| required: true, |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/repos/:owner/:repo" |
| }, |
| reviewProject: { |
| deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)", |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects/:project_id" |
| }, |
| reviewProjectInOrg: { |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| org: { |
| required: true, |
| type: "string" |
| }, |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/projects/:project_id" |
| }, |
| reviewProjectLegacy: { |
| deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy", |
| headers: { |
| accept: "application/vnd.github.inertia-preview+json" |
| }, |
| method: "GET", |
| params: { |
| project_id: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/projects/:project_id" |
| }, |
| update: { |
| deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)", |
| method: "PATCH", |
| params: { |
| description: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| parent_team_id: { |
| type: "integer" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| privacy: { |
| enum: ["secret", "closed"], |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id" |
| }, |
| updateDiscussion: { |
| deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)", |
| method: "PATCH", |
| params: { |
| body: { |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number" |
| }, |
| updateDiscussionComment: { |
| deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)", |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" |
| }, |
| updateDiscussionCommentInOrg: { |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" |
| }, |
| updateDiscussionCommentLegacy: { |
| deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy", |
| method: "PATCH", |
| params: { |
| body: { |
| required: true, |
| type: "string" |
| }, |
| comment_number: { |
| required: true, |
| type: "integer" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" |
| }, |
| updateDiscussionInOrg: { |
| method: "PATCH", |
| params: { |
| body: { |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" |
| }, |
| updateDiscussionLegacy: { |
| deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy", |
| method: "PATCH", |
| params: { |
| body: { |
| type: "string" |
| }, |
| discussion_number: { |
| required: true, |
| type: "integer" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/teams/:team_id/discussions/:discussion_number" |
| }, |
| updateInOrg: { |
| method: "PATCH", |
| params: { |
| description: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| org: { |
| required: true, |
| type: "string" |
| }, |
| parent_team_id: { |
| type: "integer" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| privacy: { |
| enum: ["secret", "closed"], |
| type: "string" |
| }, |
| team_slug: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/orgs/:org/teams/:team_slug" |
| }, |
| updateLegacy: { |
| deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy", |
| method: "PATCH", |
| params: { |
| description: { |
| type: "string" |
| }, |
| name: { |
| required: true, |
| type: "string" |
| }, |
| parent_team_id: { |
| type: "integer" |
| }, |
| permission: { |
| enum: ["pull", "push", "admin"], |
| type: "string" |
| }, |
| privacy: { |
| enum: ["secret", "closed"], |
| type: "string" |
| }, |
| team_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/teams/:team_id" |
| } |
| }, |
| users: { |
| addEmails: { |
| method: "POST", |
| params: { |
| emails: { |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/user/emails" |
| }, |
| block: { |
| method: "PUT", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/blocks/:username" |
| }, |
| checkBlocked: { |
| method: "GET", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/blocks/:username" |
| }, |
| checkFollowing: { |
| method: "GET", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/following/:username" |
| }, |
| checkFollowingForUser: { |
| method: "GET", |
| params: { |
| target_user: { |
| required: true, |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/following/:target_user" |
| }, |
| createGpgKey: { |
| method: "POST", |
| params: { |
| armored_public_key: { |
| type: "string" |
| } |
| }, |
| url: "/user/gpg_keys" |
| }, |
| createPublicKey: { |
| method: "POST", |
| params: { |
| key: { |
| type: "string" |
| }, |
| title: { |
| type: "string" |
| } |
| }, |
| url: "/user/keys" |
| }, |
| deleteEmails: { |
| method: "DELETE", |
| params: { |
| emails: { |
| required: true, |
| type: "string[]" |
| } |
| }, |
| url: "/user/emails" |
| }, |
| deleteGpgKey: { |
| method: "DELETE", |
| params: { |
| gpg_key_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/gpg_keys/:gpg_key_id" |
| }, |
| deletePublicKey: { |
| method: "DELETE", |
| params: { |
| key_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/keys/:key_id" |
| }, |
| follow: { |
| method: "PUT", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/following/:username" |
| }, |
| getAuthenticated: { |
| method: "GET", |
| params: {}, |
| url: "/user" |
| }, |
| getByUsername: { |
| method: "GET", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username" |
| }, |
| getContextForUser: { |
| method: "GET", |
| params: { |
| subject_id: { |
| type: "string" |
| }, |
| subject_type: { |
| enum: ["organization", "repository", "issue", "pull_request"], |
| type: "string" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/hovercard" |
| }, |
| getGpgKey: { |
| method: "GET", |
| params: { |
| gpg_key_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/gpg_keys/:gpg_key_id" |
| }, |
| getPublicKey: { |
| method: "GET", |
| params: { |
| key_id: { |
| required: true, |
| type: "integer" |
| } |
| }, |
| url: "/user/keys/:key_id" |
| }, |
| list: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| since: { |
| type: "string" |
| } |
| }, |
| url: "/users" |
| }, |
| listBlocked: { |
| method: "GET", |
| params: {}, |
| url: "/user/blocks" |
| }, |
| listEmails: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/emails" |
| }, |
| listFollowersForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/followers" |
| }, |
| listFollowersForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/followers" |
| }, |
| listFollowingForAuthenticatedUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/following" |
| }, |
| listFollowingForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/following" |
| }, |
| listGpgKeys: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/gpg_keys" |
| }, |
| listGpgKeysForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/gpg_keys" |
| }, |
| listPublicEmails: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/public_emails" |
| }, |
| listPublicKeys: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| } |
| }, |
| url: "/user/keys" |
| }, |
| listPublicKeysForUser: { |
| method: "GET", |
| params: { |
| page: { |
| type: "integer" |
| }, |
| per_page: { |
| type: "integer" |
| }, |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/users/:username/keys" |
| }, |
| togglePrimaryEmailVisibility: { |
| method: "PATCH", |
| params: { |
| email: { |
| required: true, |
| type: "string" |
| }, |
| visibility: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/email/visibility" |
| }, |
| unblock: { |
| method: "DELETE", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/blocks/:username" |
| }, |
| unfollow: { |
| method: "DELETE", |
| params: { |
| username: { |
| required: true, |
| type: "string" |
| } |
| }, |
| url: "/user/following/:username" |
| }, |
| updateAuthenticated: { |
| method: "PATCH", |
| params: { |
| bio: { |
| type: "string" |
| }, |
| blog: { |
| type: "string" |
| }, |
| company: { |
| type: "string" |
| }, |
| email: { |
| type: "string" |
| }, |
| hireable: { |
| type: "boolean" |
| }, |
| location: { |
| type: "string" |
| }, |
| name: { |
| type: "string" |
| } |
| }, |
| url: "/user" |
| } |
| } |
| }; |
| |
| const VERSION = "2.4.0"; |
| |
/**
 * Registers every route of `routes` on the octokit instance as
 * `octokit[namespace][apiName](params)`.
 *
 * - `method`, `url` and `headers` of each route become request defaults.
 * - `params` is attached as the request validation schema.
 * - Routes with deprecated parameters get patched request/endpoint methods.
 * - Routes that are deprecated as a whole get a self-replacing wrapper that
 *   logs a deprecation warning on first use only.
 *
 * @param {object} octokit - Octokit instance (provides `.request` and `.log`).
 * @param {object} routes - Map of namespace name -> api name -> route options.
 */
function registerEndpoints(octokit, routes) {
  for (const namespaceName of Object.keys(routes)) {
    // Ensure the namespace object (e.g. `octokit.repos`) exists.
    if (!octokit[namespaceName]) {
      octokit[namespaceName] = {};
    }

    for (const apiName of Object.keys(routes[namespaceName])) {
      const apiOptions = routes[namespaceName][apiName];

      // Collect the endpoint-level defaults present on this route definition.
      const endpointDefaults = {};
      for (const key of ["method", "url", "headers"]) {
        if (typeof apiOptions[key] !== "undefined") {
          endpointDefaults[key] = apiOptions[key];
        }
      }

      // `params` doubles as the validation schema for the request.
      endpointDefaults.request = {
        validate: apiOptions.params
      };

      let request = octokit.request.defaults(endpointDefaults);

      // Patch request & endpoint methods to support deprecated parameters.
      // Not the most elegant solution, but we don’t want to move deprecation
      // logic into octokit/endpoint.js as it’s out of scope.
      const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(
        key => apiOptions.params[key].deprecated
      );

      if (hasDeprecatedParam) {
        const patch = patchForDeprecation.bind(null, octokit, apiOptions);
        request = patch(
          octokit.request.defaults(endpointDefaults),
          `.${namespaceName}.${apiName}()`
        );
        request.endpoint = patch(
          request.endpoint,
          `.${namespaceName}.${apiName}.endpoint()`
        );
        request.endpoint.merge = patch(
          request.endpoint.merge,
          `.${namespaceName}.${apiName}.endpoint.merge()`
        );
      }

      if (apiOptions.deprecated) {
        // Warn on first use, then replace the wrapper with the bare request
        // method so the deprecation message is only logged once.
        octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() {
          octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`));
          octokit[namespaceName][apiName] = request;
          return request.apply(null, arguments);
        }, request);
        continue;
      }

      octokit[namespaceName][apiName] = request;
    }
  }
}
| |
/**
 * Wraps `method` so that deprecated parameters (per `apiOptions.params`) are
 * renamed to their `alias` before the call, logging a deprecation warning for
 * each one. Existing alias values provided by the caller win over the
 * deprecated value. Static members of `method` are carried over.
 *
 * @param {object} octokit - Octokit instance (provides `.log.warn`).
 * @param {object} apiOptions - Route definition with a `params` schema.
 * @param {Function} method - Request or endpoint method to wrap.
 * @param {string} methodName - Human-readable name used in warnings.
 * @returns {Function} The patched method.
 */
function patchForDeprecation(octokit, apiOptions, method, methodName) {
  const patchedMethod = options => {
    // Work on a shallow copy; never mutate the caller's options object.
    options = Object.assign({}, options);
    for (const key of Object.keys(options)) {
      const paramSpec = apiOptions.params[key];
      if (!paramSpec || !paramSpec.deprecated) {
        continue;
      }
      const aliasKey = paramSpec.alias;
      octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`));

      // Only fill the alias if the caller didn't already provide it.
      if (!(aliasKey in options)) {
        options[aliasKey] = options[key];
      }
      delete options[key];
    }
    return method(options);
  };

  // Carry over static members (endpoint, defaults, …) from the wrapped method.
  for (const key of Object.keys(method)) {
    patchedMethod[key] = method[key];
  }
  return patchedMethod;
}
| |
| /** |
| * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary |
| * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is |
| * done, we will remove the registerEndpoints methods and return the methods |
| * directly as with the other plugins. At that point we will also remove the |
| * legacy workarounds and deprecations. |
| * |
| * See the plan at |
| * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 |
| */ |
| |
| function restEndpointMethods(octokit) { |
| // @ts-ignore |
| octokit.registerEndpoints = registerEndpoints.bind(null, octokit); |
| registerEndpoints(octokit, endpointsByScope); // Aliasing scopes for backward compatibility |
| // See https://github.com/octokit/rest.js/pull/1134 |
| |
| [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]].forEach(([deprecatedScope, scope]) => { |
| Object.defineProperty(octokit, deprecatedScope, { |
| get() { |
| octokit.log.warn( // @ts-ignore |
| new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`)); // @ts-ignore |
| |
| return octokit[scope]; |
| } |
| |
| }); |
| }); |
| return {}; |
| } |
| restEndpointMethods.VERSION = VERSION; |
| |
| exports.restEndpointMethods = restEndpointMethods; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 850: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = paginationMethodsPlugin |
| |
/**
 * Octokit plugin: attaches the legacy pagination helpers to the instance.
 * The `get*Page` helpers need the octokit instance bound as first argument to
 * issue follow-up requests; the `has*Page` predicates only inspect a response.
 *
 * @param {object} octokit - Octokit instance to augment.
 */
function paginationMethodsPlugin (octokit) {
  // Page fetchers: bound to the instance so they can make requests.
  const boundGetters = [
    ['getFirstPage', 777],
    ['getLastPage', 649],
    ['getNextPage', 550],
    ['getPreviousPage', 563]
  ]
  for (const [name, moduleId] of boundGetters) {
    octokit[name] = __webpack_require__(moduleId).bind(null, octokit)
  }

  // Page predicates: plain functions, no instance state needed.
  octokit.hasFirstPage = __webpack_require__(536)
  octokit.hasLastPage = __webpack_require__(336)
  octokit.hasNextPage = __webpack_require__(929)
  octokit.hasPreviousPage = __webpack_require__(558)
}
| |
| |
| /***/ }), |
| |
| /***/ 854: |
| /***/ (function(module) { |
| |
| /** |
| * lodash (Custom Build) <https://lodash.com/> |
| * Build: `lodash modularize exports="npm" -o ./` |
| * Copyright jQuery Foundation and other contributors <https://jquery.org/> |
| * Released under MIT license <https://lodash.com/license> |
| * Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE> |
| * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors |
| */ |
| |
| /** Used as the `TypeError` message for "Functions" methods. */ |
| var FUNC_ERROR_TEXT = 'Expected a function'; |
| |
| /** Used to stand-in for `undefined` hash values. */ |
| var HASH_UNDEFINED = '__lodash_hash_undefined__'; |
| |
| /** Used as references for various `Number` constants. */ |
| var INFINITY = 1 / 0; |
| |
| /** `Object#toString` result references. */ |
| var funcTag = '[object Function]', |
| genTag = '[object GeneratorFunction]', |
| symbolTag = '[object Symbol]'; |
| |
| /** Used to match property names within property paths. */ |
| var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, |
| reIsPlainProp = /^\w*$/, |
| reLeadingDot = /^\./, |
| rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; |
| |
| /** |
| * Used to match `RegExp` |
| * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). |
| */ |
| var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; |
| |
| /** Used to match backslashes in property paths. */ |
| var reEscapeChar = /\\(\\)?/g; |
| |
| /** Used to detect host constructors (Safari). */ |
| var reIsHostCtor = /^\[object .+?Constructor\]$/; |
| |
| /** Detect free variable `global` from Node.js. */ |
| var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; |
| |
| /** Detect free variable `self`. */ |
| var freeSelf = typeof self == 'object' && self && self.Object === Object && self; |
| |
| /** Used as a reference to the global object. */ |
| var root = freeGlobal || freeSelf || Function('return this')(); |
| |
| /** |
| * Gets the value at `key` of `object`. |
| * |
| * @private |
| * @param {Object} [object] The object to query. |
| * @param {string} key The key of the property to get. |
| * @returns {*} Returns the property value. |
| */ |
| function getValue(object, key) { |
| return object == null ? undefined : object[key]; |
| } |
| |
| /** |
| * Checks if `value` is a host object in IE < 9. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a host object, else `false`. |
| */ |
| function isHostObject(value) { |
| // Many host objects are `Object` objects that can coerce to strings |
| // despite having improperly defined `toString` methods. |
| var result = false; |
| if (value != null && typeof value.toString != 'function') { |
| try { |
| result = !!(value + ''); |
| } catch (e) {} |
| } |
| return result; |
| } |
| |
| /** Used for built-in method references. */ |
| var arrayProto = Array.prototype, |
| funcProto = Function.prototype, |
| objectProto = Object.prototype; |
| |
| /** Used to detect overreaching core-js shims. */ |
| var coreJsData = root['__core-js_shared__']; |
| |
| /** Used to detect methods masquerading as native. */ |
| var maskSrcKey = (function() { |
| var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); |
| return uid ? ('Symbol(src)_1.' + uid) : ''; |
| }()); |
| |
| /** Used to resolve the decompiled source of functions. */ |
| var funcToString = funcProto.toString; |
| |
| /** Used to check objects for own properties. */ |
| var hasOwnProperty = objectProto.hasOwnProperty; |
| |
| /** |
| * Used to resolve the |
| * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) |
| * of values. |
| */ |
| var objectToString = objectProto.toString; |
| |
| /** Used to detect if a method is native. */ |
| var reIsNative = RegExp('^' + |
| funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') |
| .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' |
| ); |
| |
| /** Built-in value references. */ |
| var Symbol = root.Symbol, |
| splice = arrayProto.splice; |
| |
| /* Built-in method references that are verified to be native. */ |
| var Map = getNative(root, 'Map'), |
| nativeCreate = getNative(Object, 'create'); |
| |
| /** Used to convert symbols to primitives and strings. */ |
| var symbolProto = Symbol ? Symbol.prototype : undefined, |
| symbolToString = symbolProto ? symbolProto.toString : undefined; |
| |
| /** |
| * Creates a hash object. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function Hash(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the hash. |
| * |
| * @private |
| * @name clear |
| * @memberOf Hash |
| */ |
| function hashClear() { |
| this.__data__ = nativeCreate ? nativeCreate(null) : {}; |
| } |
| |
| /** |
| * Removes `key` and its value from the hash. |
| * |
| * @private |
| * @name delete |
| * @memberOf Hash |
| * @param {Object} hash The hash to modify. |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function hashDelete(key) { |
| return this.has(key) && delete this.__data__[key]; |
| } |
| |
| /** |
| * Gets the hash value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf Hash |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function hashGet(key) { |
| var data = this.__data__; |
| if (nativeCreate) { |
| var result = data[key]; |
| return result === HASH_UNDEFINED ? undefined : result; |
| } |
| return hasOwnProperty.call(data, key) ? data[key] : undefined; |
| } |
| |
| /** |
| * Checks if a hash value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf Hash |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function hashHas(key) { |
| var data = this.__data__; |
| return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); |
| } |
| |
| /** |
| * Sets the hash `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf Hash |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the hash instance. |
| */ |
| function hashSet(key, value) { |
| var data = this.__data__; |
| data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; |
| return this; |
| } |
| |
| // Add methods to `Hash`. |
| Hash.prototype.clear = hashClear; |
| Hash.prototype['delete'] = hashDelete; |
| Hash.prototype.get = hashGet; |
| Hash.prototype.has = hashHas; |
| Hash.prototype.set = hashSet; |
| |
| /** |
| * Creates an list cache object. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function ListCache(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the list cache. |
| * |
| * @private |
| * @name clear |
| * @memberOf ListCache |
| */ |
| function listCacheClear() { |
| this.__data__ = []; |
| } |
| |
| /** |
| * Removes `key` and its value from the list cache. |
| * |
| * @private |
| * @name delete |
| * @memberOf ListCache |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function listCacheDelete(key) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| if (index < 0) { |
| return false; |
| } |
| var lastIndex = data.length - 1; |
| if (index == lastIndex) { |
| data.pop(); |
| } else { |
| splice.call(data, index, 1); |
| } |
| return true; |
| } |
| |
| /** |
| * Gets the list cache value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf ListCache |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function listCacheGet(key) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| return index < 0 ? undefined : data[index][1]; |
| } |
| |
| /** |
| * Checks if a list cache value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf ListCache |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function listCacheHas(key) { |
| return assocIndexOf(this.__data__, key) > -1; |
| } |
| |
| /** |
| * Sets the list cache `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf ListCache |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the list cache instance. |
| */ |
| function listCacheSet(key, value) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| if (index < 0) { |
| data.push([key, value]); |
| } else { |
| data[index][1] = value; |
| } |
| return this; |
| } |
| |
// Add methods to `ListCache`. (`delete` is bracket-quoted because it is a
// reserved word in older engines.)
ListCache.prototype.clear = listCacheClear;
ListCache.prototype['delete'] = listCacheDelete;
ListCache.prototype.get = listCacheGet;
ListCache.prototype.has = listCacheHas;
ListCache.prototype.set = listCacheSet;
| |
| /** |
| * Creates a map cache object to store key-value pairs. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function MapCache(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the map. |
| * |
| * @private |
| * @name clear |
| * @memberOf MapCache |
| */ |
| function mapCacheClear() { |
| this.__data__ = { |
| 'hash': new Hash, |
| 'map': new (Map || ListCache), |
| 'string': new Hash |
| }; |
| } |
| |
| /** |
| * Removes `key` and its value from the map. |
| * |
| * @private |
| * @name delete |
| * @memberOf MapCache |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function mapCacheDelete(key) { |
| return getMapData(this, key)['delete'](key); |
| } |
| |
| /** |
| * Gets the map value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf MapCache |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function mapCacheGet(key) { |
| return getMapData(this, key).get(key); |
| } |
| |
| /** |
| * Checks if a map value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf MapCache |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function mapCacheHas(key) { |
| return getMapData(this, key).has(key); |
| } |
| |
| /** |
| * Sets the map `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf MapCache |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the map cache instance. |
| */ |
| function mapCacheSet(key, value) { |
| getMapData(this, key).set(key, value); |
| return this; |
| } |
| |
// Add methods to `MapCache`. (`delete` is bracket-quoted because it is a
// reserved word in older engines.)
MapCache.prototype.clear = mapCacheClear;
MapCache.prototype['delete'] = mapCacheDelete;
MapCache.prototype.get = mapCacheGet;
MapCache.prototype.has = mapCacheHas;
MapCache.prototype.set = mapCacheSet;
| |
| /** |
| * Gets the index at which the `key` is found in `array` of key-value pairs. |
| * |
| * @private |
| * @param {Array} array The array to inspect. |
| * @param {*} key The key to search for. |
| * @returns {number} Returns the index of the matched value, else `-1`. |
| */ |
| function assocIndexOf(array, key) { |
| var length = array.length; |
| while (length--) { |
| if (eq(array[length][0], key)) { |
| return length; |
| } |
| } |
| return -1; |
| } |
| |
| /** |
| * The base implementation of `_.get` without support for default values. |
| * |
| * @private |
| * @param {Object} object The object to query. |
| * @param {Array|string} path The path of the property to get. |
| * @returns {*} Returns the resolved value. |
| */ |
| function baseGet(object, path) { |
| path = isKey(path, object) ? [path] : castPath(path); |
| |
| var index = 0, |
| length = path.length; |
| |
| while (object != null && index < length) { |
| object = object[toKey(path[index++])]; |
| } |
| return (index && index == length) ? object : undefined; |
| } |
| |
| /** |
| * The base implementation of `_.isNative` without bad shim checks. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a native function, |
| * else `false`. |
| */ |
| function baseIsNative(value) { |
| if (!isObject(value) || isMasked(value)) { |
| return false; |
| } |
| var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; |
| return pattern.test(toSource(value)); |
| } |
| |
| /** |
| * The base implementation of `_.toString` which doesn't convert nullish |
| * values to empty strings. |
| * |
| * @private |
| * @param {*} value The value to process. |
| * @returns {string} Returns the string. |
| */ |
| function baseToString(value) { |
| // Exit early for strings to avoid a performance hit in some environments. |
| if (typeof value == 'string') { |
| return value; |
| } |
| if (isSymbol(value)) { |
| return symbolToString ? symbolToString.call(value) : ''; |
| } |
| var result = (value + ''); |
| return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; |
| } |
| |
| /** |
| * Casts `value` to a path array if it's not one. |
| * |
| * @private |
| * @param {*} value The value to inspect. |
| * @returns {Array} Returns the cast property path array. |
| */ |
| function castPath(value) { |
| return isArray(value) ? value : stringToPath(value); |
| } |
| |
| /** |
| * Gets the data for `map`. |
| * |
| * @private |
| * @param {Object} map The map to query. |
| * @param {string} key The reference key. |
| * @returns {*} Returns the map data. |
| */ |
| function getMapData(map, key) { |
| var data = map.__data__; |
| return isKeyable(key) |
| ? data[typeof key == 'string' ? 'string' : 'hash'] |
| : data.map; |
| } |
| |
| /** |
| * Gets the native function at `key` of `object`. |
| * |
| * @private |
| * @param {Object} object The object to query. |
| * @param {string} key The key of the method to get. |
| * @returns {*} Returns the function if it's native, else `undefined`. |
| */ |
| function getNative(object, key) { |
| var value = getValue(object, key); |
| return baseIsNative(value) ? value : undefined; |
| } |
| |
| /** |
| * Checks if `value` is a property name and not a property path. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @param {Object} [object] The object to query keys on. |
| * @returns {boolean} Returns `true` if `value` is a property name, else `false`. |
| */ |
| function isKey(value, object) { |
| if (isArray(value)) { |
| return false; |
| } |
| var type = typeof value; |
| if (type == 'number' || type == 'symbol' || type == 'boolean' || |
| value == null || isSymbol(value)) { |
| return true; |
| } |
| return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || |
| (object != null && value in Object(object)); |
| } |
| |
| /** |
| * Checks if `value` is suitable for use as unique object key. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is suitable, else `false`. |
| */ |
| function isKeyable(value) { |
| var type = typeof value; |
| return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') |
| ? (value !== '__proto__') |
| : (value === null); |
| } |
| |
| /** |
| * Checks if `func` has its source masked. |
| * |
| * @private |
| * @param {Function} func The function to check. |
| * @returns {boolean} Returns `true` if `func` is masked, else `false`. |
| */ |
| function isMasked(func) { |
| return !!maskSrcKey && (maskSrcKey in func); |
| } |
| |
| /** |
| * Converts `string` to a property path array. |
| * |
| * @private |
| * @param {string} string The string to convert. |
| * @returns {Array} Returns the property path array. |
| */ |
| var stringToPath = memoize(function(string) { |
| string = toString(string); |
| |
| var result = []; |
| if (reLeadingDot.test(string)) { |
| result.push(''); |
| } |
| string.replace(rePropName, function(match, number, quote, string) { |
| result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); |
| }); |
| return result; |
| }); |
| |
| /** |
| * Converts `value` to a string key if it's not a string or symbol. |
| * |
| * @private |
| * @param {*} value The value to inspect. |
| * @returns {string|symbol} Returns the key. |
| */ |
| function toKey(value) { |
| if (typeof value == 'string' || isSymbol(value)) { |
| return value; |
| } |
| var result = (value + ''); |
| return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; |
| } |
| |
| /** |
| * Converts `func` to its source code. |
| * |
| * @private |
| * @param {Function} func The function to process. |
| * @returns {string} Returns the source code. |
| */ |
| function toSource(func) { |
| if (func != null) { |
| try { |
| return funcToString.call(func); |
| } catch (e) {} |
| try { |
| return (func + ''); |
| } catch (e) {} |
| } |
| return ''; |
| } |
| |
| /** |
| * Creates a function that memoizes the result of `func`. If `resolver` is |
| * provided, it determines the cache key for storing the result based on the |
| * arguments provided to the memoized function. By default, the first argument |
| * provided to the memoized function is used as the map cache key. The `func` |
| * is invoked with the `this` binding of the memoized function. |
| * |
| * **Note:** The cache is exposed as the `cache` property on the memoized |
| * function. Its creation may be customized by replacing the `_.memoize.Cache` |
| * constructor with one whose instances implement the |
| * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) |
| * method interface of `delete`, `get`, `has`, and `set`. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Function |
| * @param {Function} func The function to have its output memoized. |
| * @param {Function} [resolver] The function to resolve the cache key. |
| * @returns {Function} Returns the new memoized function. |
| * @example |
| * |
| * var object = { 'a': 1, 'b': 2 }; |
| * var other = { 'c': 3, 'd': 4 }; |
| * |
| * var values = _.memoize(_.values); |
| * values(object); |
| * // => [1, 2] |
| * |
| * values(other); |
| * // => [3, 4] |
| * |
| * object.a = 2; |
| * values(object); |
| * // => [1, 2] |
| * |
| * // Modify the result cache. |
| * values.cache.set(object, ['a', 'b']); |
| * values(object); |
| * // => ['a', 'b'] |
| * |
| * // Replace `_.memoize.Cache`. |
| * _.memoize.Cache = WeakMap; |
| */ |
| function memoize(func, resolver) { |
| if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { |
| throw new TypeError(FUNC_ERROR_TEXT); |
| } |
| var memoized = function() { |
| var args = arguments, |
| key = resolver ? resolver.apply(this, args) : args[0], |
| cache = memoized.cache; |
| |
| if (cache.has(key)) { |
| return cache.get(key); |
| } |
| var result = func.apply(this, args); |
| memoized.cache = cache.set(key, result); |
| return result; |
| }; |
| memoized.cache = new (memoize.Cache || MapCache); |
| return memoized; |
| } |
| |
| // Assign cache to `_.memoize`. |
| memoize.Cache = MapCache; |
| |
| /** |
| * Performs a |
| * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) |
| * comparison between two values to determine if they are equivalent. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to compare. |
| * @param {*} other The other value to compare. |
| * @returns {boolean} Returns `true` if the values are equivalent, else `false`. |
| * @example |
| * |
| * var object = { 'a': 1 }; |
| * var other = { 'a': 1 }; |
| * |
| * _.eq(object, object); |
| * // => true |
| * |
| * _.eq(object, other); |
| * // => false |
| * |
| * _.eq('a', 'a'); |
| * // => true |
| * |
| * _.eq('a', Object('a')); |
| * // => false |
| * |
| * _.eq(NaN, NaN); |
| * // => true |
| */ |
| function eq(value, other) { |
| return value === other || (value !== value && other !== other); |
| } |
| |
| /** |
| * Checks if `value` is classified as an `Array` object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is an array, else `false`. |
| * @example |
| * |
| * _.isArray([1, 2, 3]); |
| * // => true |
| * |
| * _.isArray(document.body.children); |
| * // => false |
| * |
| * _.isArray('abc'); |
| * // => false |
| * |
| * _.isArray(_.noop); |
| * // => false |
| */ |
| var isArray = Array.isArray; |
| |
| /** |
| * Checks if `value` is classified as a `Function` object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a function, else `false`. |
| * @example |
| * |
| * _.isFunction(_); |
| * // => true |
| * |
| * _.isFunction(/abc/); |
| * // => false |
| */ |
| function isFunction(value) { |
| // The use of `Object#toString` avoids issues with the `typeof` operator |
| // in Safari 8-9 which returns 'object' for typed array and other constructors. |
| var tag = isObject(value) ? objectToString.call(value) : ''; |
| return tag == funcTag || tag == genTag; |
| } |
| |
| /** |
| * Checks if `value` is the |
| * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) |
| * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is an object, else `false`. |
| * @example |
| * |
| * _.isObject({}); |
| * // => true |
| * |
| * _.isObject([1, 2, 3]); |
| * // => true |
| * |
| * _.isObject(_.noop); |
| * // => true |
| * |
| * _.isObject(null); |
| * // => false |
| */ |
| function isObject(value) { |
| var type = typeof value; |
| return !!value && (type == 'object' || type == 'function'); |
| } |
| |
| /** |
| * Checks if `value` is object-like. A value is object-like if it's not `null` |
| * and has a `typeof` result of "object". |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is object-like, else `false`. |
| * @example |
| * |
| * _.isObjectLike({}); |
| * // => true |
| * |
| * _.isObjectLike([1, 2, 3]); |
| * // => true |
| * |
| * _.isObjectLike(_.noop); |
| * // => false |
| * |
| * _.isObjectLike(null); |
| * // => false |
| */ |
| function isObjectLike(value) { |
| return !!value && typeof value == 'object'; |
| } |
| |
| /** |
| * Checks if `value` is classified as a `Symbol` primitive or object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. |
| * @example |
| * |
| * _.isSymbol(Symbol.iterator); |
| * // => true |
| * |
| * _.isSymbol('abc'); |
| * // => false |
| */ |
| function isSymbol(value) { |
| return typeof value == 'symbol' || |
| (isObjectLike(value) && objectToString.call(value) == symbolTag); |
| } |
| |
| /** |
| * Converts `value` to a string. An empty string is returned for `null` |
| * and `undefined` values. The sign of `-0` is preserved. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to process. |
| * @returns {string} Returns the string. |
| * @example |
| * |
| * _.toString(null); |
| * // => '' |
| * |
| * _.toString(-0); |
| * // => '-0' |
| * |
| * _.toString([1, 2, 3]); |
| * // => '1,2,3' |
| */ |
| function toString(value) { |
| return value == null ? '' : baseToString(value); |
| } |
| |
| /** |
| * Gets the value at `path` of `object`. If the resolved value is |
| * `undefined`, the `defaultValue` is returned in its place. |
| * |
| * @static |
| * @memberOf _ |
| * @since 3.7.0 |
| * @category Object |
| * @param {Object} object The object to query. |
| * @param {Array|string} path The path of the property to get. |
| * @param {*} [defaultValue] The value returned for `undefined` resolved values. |
| * @returns {*} Returns the resolved value. |
| * @example |
| * |
| * var object = { 'a': [{ 'b': { 'c': 3 } }] }; |
| * |
| * _.get(object, 'a[0].b.c'); |
| * // => 3 |
| * |
| * _.get(object, ['a', '0', 'b', 'c']); |
| * // => 3 |
| * |
| * _.get(object, 'a.b.c', 'default'); |
| * // => 'default' |
| */ |
| function get(object, path, defaultValue) { |
| var result = object == null ? undefined : baseGet(object, path); |
| return result === undefined ? defaultValue : result; |
| } |
| |
| module.exports = get; |
| |
| |
| /***/ }), |
| |
| /***/ 855: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = registerPlugin; |
| |
| const factory = __webpack_require__(47); |
| |
// Returns a new Octokit factory extended with `pluginFunction`;
// registering an already-present plugin is a no-op (no duplicates).
function registerPlugin(plugins, pluginFunction) {
  const nextPlugins = plugins.includes(pluginFunction)
    ? plugins
    : plugins.concat(pluginFunction);
  return factory(nextPlugins);
}
| |
| |
| /***/ }), |
| |
| /***/ 856: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = __webpack_require__(141); |
| |
| |
| /***/ }), |
| |
| /***/ 863: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = authenticationBeforeRequest; |
| |
| const btoa = __webpack_require__(675); |
| |
| const withAuthorizationPrefix = __webpack_require__(143); |
| |
// Request hook that injects authentication into `options` based on the
// shape of `state.auth`: a token string, basic credentials, an OAuth
// app's client id/secret, or an async token-producing function.
function authenticationBeforeRequest(state, options) {
  // 1. Plain token / app JWT passed as a string.
  if (typeof state.auth === "string") {
    options.headers.authorization = withAuthorizationPrefix(state.auth);
    return;
  }

  // 2. Basic authentication, optionally with a one-time password.
  if (state.auth.username) {
    const credentials = btoa(`${state.auth.username}:${state.auth.password}`);
    options.headers.authorization = `Basic ${credentials}`;
    if (state.otp) {
      options.headers["x-github-otp"] = state.otp;
    }
    return;
  }

  // 3. OAuth application (clientId / clientSecret).
  if (state.auth.clientId) {
    // Special case: the OAuth-authorization management routes
    // ("/applications/:client_id/tokens/:access_token" — check, reset,
    // and revoke an authorization; see
    // https://developer.github.com/v3/oauth_authorizations/) require the
    // client id/secret as Basic auth instead of query parameters. Match
    // both the placeholder form and already-substituted URLs such as
    // "/applications/123/tokens/token456".
    if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) {
      const credentials = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`);
      options.headers.authorization = `Basic ${credentials}`;
      return;
    }

    const separator = options.url.indexOf("?") === -1 ? "?" : "&";
    options.url += `${separator}client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`;
    return;
  }

  // 4. `state.auth` is a function returning (a promise for) a token.
  return Promise.resolve()
    .then(() => state.auth())
    .then(authorization => {
      options.headers.authorization = withAuthorizationPrefix(authorization);
    });
}
| |
| |
| /***/ }), |
| |
| /***/ 866: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| var shebangRegex = __webpack_require__(816); |
| |
| module.exports = function (str) { |
| var match = str.match(shebangRegex); |
| |
| if (!match) { |
| return null; |
| } |
| |
| var arr = match[0].replace(/#! ?/, '').split(' '); |
| var bin = arr[0].split('/').pop(); |
| var arg = arr[1]; |
| |
| return (bin === 'env' ? |
| arg : |
| bin + (arg ? ' ' + arg : '') |
| ); |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 881: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| const isWin = process.platform === 'win32'; |
| |
// Builds an ENOENT error mimicking what Node.js itself raises when a
// spawned command does not exist.
function notFoundError(original, syscall) {
    const error = new Error(`${syscall} ${original.command} ENOENT`);

    return Object.assign(error, {
        code: 'ENOENT',
        errno: 'ENOENT',
        syscall: `${syscall} ${original.command}`,
        path: original.command,
        spawnargs: original.args,
    });
}
| |
// On Windows, wraps `cp.emit` so that an "exit" event caused by a
// missing command is re-emitted as an ENOENT "error" event instead.
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
function hookChildProcess(cp, parsed) {
    if (!isWin) {
        return;
    }

    const emit = cp.emit;

    cp.emit = function (name, arg1) {
        if (name !== 'exit') {
            return emit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
        }

        // Exit code 1 with no resolved file means the command was not found.
        const err = verifyENOENT(arg1, parsed, 'spawn');

        if (err) {
            return emit.call(cp, 'error', err);
        }

        return emit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
    };
}
| |
// Returns an ENOENT error for an async spawn that exited with status 1
// and no resolved executable on Windows; null otherwise.
function verifyENOENT(status, parsed) {
    if (!isWin || status !== 1 || parsed.file) {
        return null;
    }

    return notFoundError(parsed.original, 'spawn');
}
| |
// Synchronous counterpart of `verifyENOENT` (reports `spawnSync`).
function verifyENOENTSync(status, parsed) {
    if (!isWin || status !== 1 || parsed.file) {
        return null;
    }

    return notFoundError(parsed.original, 'spawnSync');
}
| |
// Public surface consumed by the cross-spawn entry point.
module.exports = {
    hookChildProcess,
    verifyENOENT,
    verifyENOENTSync,
    notFoundError,
};
| |
| |
| /***/ }), |
| |
| /***/ 883: |
| /***/ (function(module) { |
| |
| /** |
| * lodash (Custom Build) <https://lodash.com/> |
| * Build: `lodash modularize exports="npm" -o ./` |
| * Copyright jQuery Foundation and other contributors <https://jquery.org/> |
| * Released under MIT license <https://lodash.com/license> |
| * Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE> |
| * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors |
| */ |
| |
| /** Used as the `TypeError` message for "Functions" methods. */ |
| var FUNC_ERROR_TEXT = 'Expected a function'; |
| |
| /** Used to stand-in for `undefined` hash values. */ |
| var HASH_UNDEFINED = '__lodash_hash_undefined__'; |
| |
| /** Used as references for various `Number` constants. */ |
| var INFINITY = 1 / 0, |
| MAX_SAFE_INTEGER = 9007199254740991; |
| |
| /** `Object#toString` result references. */ |
| var funcTag = '[object Function]', |
| genTag = '[object GeneratorFunction]', |
| symbolTag = '[object Symbol]'; |
| |
| /** Used to match property names within property paths. */ |
| var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, |
| reIsPlainProp = /^\w*$/, |
| reLeadingDot = /^\./, |
| rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; |
| |
| /** |
| * Used to match `RegExp` |
| * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). |
| */ |
| var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; |
| |
| /** Used to match backslashes in property paths. */ |
| var reEscapeChar = /\\(\\)?/g; |
| |
| /** Used to detect host constructors (Safari). */ |
| var reIsHostCtor = /^\[object .+?Constructor\]$/; |
| |
| /** Used to detect unsigned integer values. */ |
| var reIsUint = /^(?:0|[1-9]\d*)$/; |
| |
| /** Detect free variable `global` from Node.js. */ |
| var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; |
| |
| /** Detect free variable `self`. */ |
| var freeSelf = typeof self == 'object' && self && self.Object === Object && self; |
| |
| /** Used as a reference to the global object. */ |
| var root = freeGlobal || freeSelf || Function('return this')(); |
| |
| /** |
| * Gets the value at `key` of `object`. |
| * |
| * @private |
| * @param {Object} [object] The object to query. |
| * @param {string} key The key of the property to get. |
| * @returns {*} Returns the property value. |
| */ |
| function getValue(object, key) { |
| return object == null ? undefined : object[key]; |
| } |
| |
| /** |
| * Checks if `value` is a host object in IE < 9. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a host object, else `false`. |
| */ |
| function isHostObject(value) { |
| // Many host objects are `Object` objects that can coerce to strings |
| // despite having improperly defined `toString` methods. |
| var result = false; |
| if (value != null && typeof value.toString != 'function') { |
| try { |
| result = !!(value + ''); |
| } catch (e) {} |
| } |
| return result; |
| } |
| |
| /** Used for built-in method references. */ |
| var arrayProto = Array.prototype, |
| funcProto = Function.prototype, |
| objectProto = Object.prototype; |
| |
| /** Used to detect overreaching core-js shims. */ |
| var coreJsData = root['__core-js_shared__']; |
| |
| /** Used to detect methods masquerading as native. */ |
| var maskSrcKey = (function() { |
| var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); |
| return uid ? ('Symbol(src)_1.' + uid) : ''; |
| }()); |
| |
| /** Used to resolve the decompiled source of functions. */ |
| var funcToString = funcProto.toString; |
| |
| /** Used to check objects for own properties. */ |
| var hasOwnProperty = objectProto.hasOwnProperty; |
| |
| /** |
| * Used to resolve the |
| * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) |
| * of values. |
| */ |
| var objectToString = objectProto.toString; |
| |
| /** Used to detect if a method is native. */ |
| var reIsNative = RegExp('^' + |
| funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') |
| .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' |
| ); |
| |
| /** Built-in value references. */ |
| var Symbol = root.Symbol, |
| splice = arrayProto.splice; |
| |
| /* Built-in method references that are verified to be native. */ |
| var Map = getNative(root, 'Map'), |
| nativeCreate = getNative(Object, 'create'); |
| |
| /** Used to convert symbols to primitives and strings. */ |
| var symbolProto = Symbol ? Symbol.prototype : undefined, |
| symbolToString = symbolProto ? symbolProto.toString : undefined; |
| |
| /** |
| * Creates a hash object. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function Hash(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the hash. |
| * |
| * @private |
| * @name clear |
| * @memberOf Hash |
| */ |
| function hashClear() { |
| this.__data__ = nativeCreate ? nativeCreate(null) : {}; |
| } |
| |
| /** |
| * Removes `key` and its value from the hash. |
| * |
| * @private |
| * @name delete |
| * @memberOf Hash |
| * @param {Object} hash The hash to modify. |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function hashDelete(key) { |
| return this.has(key) && delete this.__data__[key]; |
| } |
| |
| /** |
| * Gets the hash value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf Hash |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
| function hashGet(key) { |
| var data = this.__data__; |
| if (nativeCreate) { |
| var result = data[key]; |
| return result === HASH_UNDEFINED ? undefined : result; |
| } |
| return hasOwnProperty.call(data, key) ? data[key] : undefined; |
| } |
| |
| /** |
| * Checks if a hash value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf Hash |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
| function hashHas(key) { |
| var data = this.__data__; |
| return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); |
| } |
| |
| /** |
| * Sets the hash `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf Hash |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the hash instance. |
| */ |
| function hashSet(key, value) { |
| var data = this.__data__; |
| data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; |
| return this; |
| } |
| |
// Add methods to `Hash`. (`delete` is bracket-quoted because it is a
// reserved word in older engines.)
Hash.prototype.clear = hashClear;
Hash.prototype['delete'] = hashDelete;
Hash.prototype.get = hashGet;
Hash.prototype.has = hashHas;
Hash.prototype.set = hashSet;
| |
| /** |
| * Creates an list cache object. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
| function ListCache(entries) { |
| var index = -1, |
| length = entries ? entries.length : 0; |
| |
| this.clear(); |
| while (++index < length) { |
| var entry = entries[index]; |
| this.set(entry[0], entry[1]); |
| } |
| } |
| |
| /** |
| * Removes all key-value entries from the list cache. |
| * |
| * @private |
| * @name clear |
| * @memberOf ListCache |
| */ |
| function listCacheClear() { |
| this.__data__ = []; |
| } |
| |
| /** |
| * Removes `key` and its value from the list cache. |
| * |
| * @private |
| * @name delete |
| * @memberOf ListCache |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
| function listCacheDelete(key) { |
| var data = this.__data__, |
| index = assocIndexOf(data, key); |
| |
| if (index < 0) { |
| return false; |
| } |
| var lastIndex = data.length - 1; |
| if (index == lastIndex) { |
| data.pop(); |
| } else { |
| splice.call(data, index, 1); |
| } |
| return true; |
| } |
| |
| /** |
| * Gets the list cache value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf ListCache |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
function listCacheGet(key) {
  // Entry at `index` is a [key, value] pair; return its value slot.
  var index = assocIndexOf(this.__data__, key);
  return index < 0 ? undefined : this.__data__[index][1];
}
| |
| /** |
| * Checks if a list cache value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf ListCache |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
function listCacheHas(key) {
  // Linear scan via assocIndexOf (SameValueZero key comparison).
  return assocIndexOf(this.__data__, key) > -1;
}
| |
| /** |
| * Sets the list cache `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf ListCache |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the list cache instance. |
| */ |
function listCacheSet(key, value) {
  var data = this.__data__;
  var index = assocIndexOf(data, key);

  // Update the existing pair in place, or append a new one.
  if (index > -1) {
    data[index][1] = value;
  } else {
    data.push([key, value]);
  }
  return this;
}
| |
// Add methods to `ListCache`.
ListCache.prototype.clear = listCacheClear;
ListCache.prototype['delete'] = listCacheDelete; // bracket notation: `delete` is a reserved word
ListCache.prototype.get = listCacheGet;
ListCache.prototype.has = listCacheHas;
ListCache.prototype.set = listCacheSet;
| |
| /** |
| * Creates a map cache object to store key-value pairs. |
| * |
| * @private |
| * @constructor |
| * @param {Array} [entries] The key-value pairs to cache. |
| */ |
function MapCache(entries) {
  // Initialize the internal stores, then seed from the optional pair list.
  this.clear();
  if (entries) {
    for (var index = 0; index < entries.length; index++) {
      var entry = entries[index];
      this.set(entry[0], entry[1]);
    }
  }
}
| |
| /** |
| * Removes all key-value entries from the map. |
| * |
| * @private |
| * @name clear |
| * @memberOf MapCache |
| */ |
function mapCacheClear() {
  // String keys and other keyable primitives go to `Hash` buckets; all
  // remaining keys use a native `Map` when present, else a `ListCache`.
  var MapCtor = Map || ListCache;
  this.__data__ = {
    'hash': new Hash,
    'map': new MapCtor,
    'string': new Hash
  };
}
| |
| /** |
| * Removes `key` and its value from the map. |
| * |
| * @private |
| * @name delete |
| * @memberOf MapCache |
| * @param {string} key The key of the value to remove. |
| * @returns {boolean} Returns `true` if the entry was removed, else `false`. |
| */ |
function mapCacheDelete(key) {
  // Delegate to whichever internal store (hash/map/string) owns this key.
  return getMapData(this, key)['delete'](key);
}
| |
| /** |
| * Gets the map value for `key`. |
| * |
| * @private |
| * @name get |
| * @memberOf MapCache |
| * @param {string} key The key of the value to get. |
| * @returns {*} Returns the entry value. |
| */ |
function mapCacheGet(key) {
  // Delegate to whichever internal store (hash/map/string) owns this key.
  return getMapData(this, key).get(key);
}
| |
| /** |
| * Checks if a map value for `key` exists. |
| * |
| * @private |
| * @name has |
| * @memberOf MapCache |
| * @param {string} key The key of the entry to check. |
| * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. |
| */ |
function mapCacheHas(key) {
  // Delegate to whichever internal store (hash/map/string) owns this key.
  return getMapData(this, key).has(key);
}
| |
| /** |
| * Sets the map `key` to `value`. |
| * |
| * @private |
| * @name set |
| * @memberOf MapCache |
| * @param {string} key The key of the value to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns the map cache instance. |
| */ |
function mapCacheSet(key, value) {
  // Delegate to whichever internal store (hash/map/string) owns this key.
  getMapData(this, key).set(key, value);
  return this;
}
| |
// Add methods to `MapCache`.
MapCache.prototype.clear = mapCacheClear;
MapCache.prototype['delete'] = mapCacheDelete; // bracket notation: `delete` is a reserved word
MapCache.prototype.get = mapCacheGet;
MapCache.prototype.has = mapCacheHas;
MapCache.prototype.set = mapCacheSet;
| |
| /** |
| * Assigns `value` to `key` of `object` if the existing value is not equivalent |
| * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) |
| * for equality comparisons. |
| * |
| * @private |
| * @param {Object} object The object to modify. |
| * @param {string} key The key of the property to assign. |
| * @param {*} value The value to assign. |
| */ |
function assignValue(object, key, value) {
  var current = object[key];
  // Skip the write only when the own property already holds an equivalent
  // value (SameValueZero). Also write when assigning `undefined` to a key
  // that does not exist yet, so the property at least comes into existence.
  var unchanged = hasOwnProperty.call(object, key) && eq(current, value);
  if (!unchanged || (value === undefined && !(key in object))) {
    object[key] = value;
  }
}
| |
| /** |
| * Gets the index at which the `key` is found in `array` of key-value pairs. |
| * |
| * @private |
| * @param {Array} array The array to inspect. |
| * @param {*} key The key to search for. |
| * @returns {number} Returns the index of the matched value, else `-1`. |
| */ |
function assocIndexOf(array, key) {
  // Scan from the end; keys compare with `eq` (SameValueZero), so a NaN
  // key can still be located.
  for (var index = array.length - 1; index >= 0; index--) {
    if (eq(array[index][0], key)) {
      return index;
    }
  }
  return -1;
}
| |
| /** |
| * The base implementation of `_.isNative` without bad shim checks. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a native function, |
| * else `false`. |
| */ |
function baseIsNative(value) {
  // Masked functions have unreliable source text; bail out early.
  if (!isObject(value) || isMasked(value)) {
    return false;
  }
  // Functions are matched against the native-source pattern; other objects
  // (host objects) against the host-constructor pattern.
  var pattern = reIsHostCtor;
  if (isFunction(value) || isHostObject(value)) {
    pattern = reIsNative;
  }
  return pattern.test(toSource(value));
}
| |
| /** |
| * The base implementation of `_.set`. |
| * |
| * @private |
| * @param {Object} object The object to modify. |
| * @param {Array|string} path The path of the property to set. |
| * @param {*} value The value to set. |
| * @param {Function} [customizer] The function to customize path creation. |
| * @returns {Object} Returns `object`. |
| */ |
function baseSet(object, path, value, customizer) {
  // Non-objects cannot hold nested properties; return them unchanged.
  if (!isObject(object)) {
    return object;
  }
  // Treat `path` as a single key when possible, else split into segments.
  path = isKey(path, object) ? [path] : castPath(path);

  var index = -1,
      length = path.length,
      lastIndex = length - 1,
      nested = object;

  // Walk (and create, when missing) each intermediate container, then
  // assign `value` at the final key.
  while (nested != null && ++index < length) {
    var key = toKey(path[index]),
        newValue = value;

    if (index != lastIndex) {
      var objValue = nested[key];
      // The customizer may supply the intermediate container; otherwise
      // reuse the existing object, or create [] / {} depending on whether
      // the next path segment looks like an array index.
      newValue = customizer ? customizer(objValue, key, nested) : undefined;
      if (newValue === undefined) {
        newValue = isObject(objValue)
          ? objValue
          : (isIndex(path[index + 1]) ? [] : {});
      }
    }
    assignValue(nested, key, newValue);
    nested = nested[key];
  }
  return object;
}
| |
| /** |
| * The base implementation of `_.toString` which doesn't convert nullish |
| * values to empty strings. |
| * |
| * @private |
| * @param {*} value The value to process. |
| * @returns {string} Returns the string. |
| */ |
function baseToString(value) {
  // Fast path: already a string.
  if (typeof value == 'string') {
    return value;
  }
  // Symbols cannot be implicitly coerced; use Symbol.prototype.toString.
  if (isSymbol(value)) {
    return symbolToString ? symbolToString.call(value) : '';
  }
  var result = '' + value;
  // -0 coerces to "0"; preserve its sign explicitly.
  if (result == '0' && (1 / value) == -INFINITY) {
    return '-0';
  }
  return result;
}
| |
| /** |
| * Casts `value` to a path array if it's not one. |
| * |
| * @private |
| * @param {*} value The value to inspect. |
| * @returns {Array} Returns the cast property path array. |
| */ |
function castPath(value) {
  // Arrays are assumed to already be property-path segment lists.
  return isArray(value) ? value : stringToPath(value);
}
| |
| /** |
| * Gets the data for `map`. |
| * |
| * @private |
| * @param {Object} map The map to query. |
| * @param {string} key The reference key. |
| * @returns {*} Returns the map data. |
| */ |
function getMapData(map, key) {
  var data = map.__data__;
  // Keyable primitives are bucketed by type; everything else lives in the
  // (native Map or ListCache) `map` store.
  if (isKeyable(key)) {
    return typeof key == 'string' ? data.string : data.hash;
  }
  return data.map;
}
| |
| /** |
| * Gets the native function at `key` of `object`. |
| * |
| * @private |
| * @param {Object} object The object to query. |
| * @param {string} key The key of the method to get. |
| * @returns {*} Returns the function if it's native, else `undefined`. |
| */ |
function getNative(object, key) {
  var value = getValue(object, key);
  // Only hand back the member if it is genuinely native (guards against shims).
  return baseIsNative(value) ? value : undefined;
}
| |
| /** |
| * Checks if `value` is a valid array-like index. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. |
| * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. |
| */ |
function isIndex(value, length) {
  if (length == null) {
    length = MAX_SAFE_INTEGER;
  }
  if (!length) {
    return false;
  }
  // Accept numbers or unsigned-integer-looking strings that are whole,
  // non-negative, and within bounds.
  var looksNumeric = typeof value == 'number' || reIsUint.test(value);
  return looksNumeric && value > -1 && value % 1 == 0 && value < length;
}
| |
| /** |
| * Checks if `value` is a property name and not a property path. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @param {Object} [object] The object to query keys on. |
| * @returns {boolean} Returns `true` if `value` is a property name, else `false`. |
| */ |
function isKey(value, object) {
  if (isArray(value)) {
    return false;
  }
  var type = typeof value;
  // Non-string primitives (and null-ish values) are always direct keys.
  if (value == null || type == 'number' || type == 'symbol' ||
      type == 'boolean' || isSymbol(value)) {
    return true;
  }
  // A string is a direct key when it is a plain identifier, when it does
  // not look like a deep path, or when it literally exists on `object`.
  return reIsPlainProp.test(value) ||
    !reIsDeepProp.test(value) ||
    (object != null && value in Object(object));
}
| |
| /** |
| * Checks if `value` is suitable for use as unique object key. |
| * |
| * @private |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is suitable, else `false`. |
| */ |
function isKeyable(value) {
  var type = typeof value;
  if (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') {
    // '__proto__' must never be used as a hash key (prototype pollution).
    return value !== '__proto__';
  }
  // Of the remaining values, only `null` is keyable.
  return value === null;
}
| |
| /** |
| * Checks if `func` has its source masked. |
| * |
| * @private |
| * @param {Function} func The function to check. |
| * @returns {boolean} Returns `true` if `func` is masked, else `false`. |
| */ |
function isMasked(func) {
  // No-op unless `maskSrcKey` (defined earlier in this module) is truthy;
  // a function carrying that key has unreliable `toString` source text.
  return !!maskSrcKey && (maskSrcKey in func);
}
| |
| /** |
| * Converts `string` to a property path array. |
| * |
| * @private |
| * @param {string} string The string to convert. |
| * @returns {Array} Returns the property path array. |
| */ |
// Memoized: repeated lookups of the same path string reuse the parsed array.
var stringToPath = memoize(function(string) {
  string = toString(string);

  var result = [];
  // A leading dot contributes an implicit empty first key.
  if (reLeadingDot.test(string)) {
    result.push('');
  }
  // Split on property-path syntax (dot / bracket segments, per `rePropName`);
  // quoted bracket contents have their escape sequences unescaped.
  string.replace(rePropName, function(match, number, quote, string) {
    result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
  });
  return result;
});
| |
| /** |
| * Converts `value` to a string key if it's not a string or symbol. |
| * |
| * @private |
| * @param {*} value The value to inspect. |
| * @returns {string|symbol} Returns the key. |
| */ |
function toKey(value) {
  // Strings and symbols are already valid keys.
  if (typeof value == 'string' || isSymbol(value)) {
    return value;
  }
  var result = '' + value;
  // -0 coerces to "0"; keep its sign so it stays a distinct key.
  if (result == '0' && (1 / value) == -INFINITY) {
    return '-0';
  }
  return result;
}
| |
| /** |
| * Converts `func` to its source code. |
| * |
| * @private |
| * @param {Function} func The function to process. |
| * @returns {string} Returns the source code. |
| */ |
function toSource(func) {
  if (func == null) {
    return '';
  }
  // Prefer Function.prototype.toString, then string coercion; both may
  // throw for exotic host objects, in which case return ''.
  try {
    return funcToString.call(func);
  } catch (e) {}
  try {
    return '' + func;
  } catch (e) {}
  return '';
}
| |
| /** |
| * Creates a function that memoizes the result of `func`. If `resolver` is |
| * provided, it determines the cache key for storing the result based on the |
| * arguments provided to the memoized function. By default, the first argument |
| * provided to the memoized function is used as the map cache key. The `func` |
| * is invoked with the `this` binding of the memoized function. |
| * |
| * **Note:** The cache is exposed as the `cache` property on the memoized |
| * function. Its creation may be customized by replacing the `_.memoize.Cache` |
| * constructor with one whose instances implement the |
| * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) |
| * method interface of `delete`, `get`, `has`, and `set`. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Function |
| * @param {Function} func The function to have its output memoized. |
| * @param {Function} [resolver] The function to resolve the cache key. |
| * @returns {Function} Returns the new memoized function. |
| * @example |
| * |
| * var object = { 'a': 1, 'b': 2 }; |
| * var other = { 'c': 3, 'd': 4 }; |
| * |
| * var values = _.memoize(_.values); |
| * values(object); |
| * // => [1, 2] |
| * |
| * values(other); |
| * // => [3, 4] |
| * |
| * object.a = 2; |
| * values(object); |
| * // => [1, 2] |
| * |
| * // Modify the result cache. |
| * values.cache.set(object, ['a', 'b']); |
| * values(object); |
| * // => ['a', 'b'] |
| * |
| * // Replace `_.memoize.Cache`. |
| * _.memoize.Cache = WeakMap; |
| */ |
function memoize(func, resolver) {
  // Both `func` and (when given) `resolver` must be callable.
  var badFunc = typeof func != 'function';
  var badResolver = resolver && typeof resolver != 'function';
  if (badFunc || badResolver) {
    throw new TypeError(FUNC_ERROR_TEXT);
  }
  var memoized = function() {
    var args = arguments;
    // Cache key: resolver result, or the first argument by default.
    var key = resolver ? resolver.apply(this, args) : args[0];
    var cache = memoized.cache;

    if (cache.has(key)) {
      return cache.get(key);
    }
    var result = func.apply(this, args);
    // `set` may return a replacement cache instance; keep whatever it gives.
    memoized.cache = cache.set(key, result);
    return result;
  };
  memoized.cache = new (memoize.Cache || MapCache);
  return memoized;
}
| |
// Assign cache to `_.memoize`.
memoize.Cache = MapCache; // default cache constructor; callers may replace it (see memoize docs above)
| |
| /** |
| * Performs a |
| * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) |
| * comparison between two values to determine if they are equivalent. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to compare. |
| * @param {*} other The other value to compare. |
| * @returns {boolean} Returns `true` if the values are equivalent, else `false`. |
| * @example |
| * |
| * var object = { 'a': 1 }; |
| * var other = { 'a': 1 }; |
| * |
| * _.eq(object, object); |
| * // => true |
| * |
| * _.eq(object, other); |
| * // => false |
| * |
| * _.eq('a', 'a'); |
| * // => true |
| * |
| * _.eq('a', Object('a')); |
| * // => false |
| * |
| * _.eq(NaN, NaN); |
| * // => true |
| */ |
function eq(value, other) {
  if (value === other) {
    return true;
  }
  // SameValueZero: NaN equals NaN (NaN is the only value not strictly
  // equal to itself).
  return value !== value && other !== other;
}
| |
| /** |
| * Checks if `value` is classified as an `Array` object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is an array, else `false`. |
| * @example |
| * |
| * _.isArray([1, 2, 3]); |
| * // => true |
| * |
| * _.isArray(document.body.children); |
| * // => false |
| * |
| * _.isArray('abc'); |
| * // => false |
| * |
| * _.isArray(_.noop); |
| * // => false |
| */ |
var isArray = Array.isArray; // direct alias of the native check
| |
| /** |
| * Checks if `value` is classified as a `Function` object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a function, else `false`. |
| * @example |
| * |
| * _.isFunction(_); |
| * // => true |
| * |
| * _.isFunction(/abc/); |
| * // => false |
| */ |
function isFunction(value) {
  if (!isObject(value)) {
    return false;
  }
  // The use of `Object#toString` avoids issues with the `typeof` operator
  // in Safari 8-9 which returns 'object' for typed array and other constructors.
  var tag = objectToString.call(value);
  return tag == funcTag || tag == genTag;
}
| |
| /** |
| * Checks if `value` is the |
| * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) |
| * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) |
| * |
| * @static |
| * @memberOf _ |
| * @since 0.1.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is an object, else `false`. |
| * @example |
| * |
| * _.isObject({}); |
| * // => true |
| * |
| * _.isObject([1, 2, 3]); |
| * // => true |
| * |
| * _.isObject(_.noop); |
| * // => true |
| * |
| * _.isObject(null); |
| * // => false |
| */ |
function isObject(value) {
  // `null` has typeof 'object' but is not an object; exclude it first.
  if (value == null) {
    return false;
  }
  var type = typeof value;
  return type == 'object' || type == 'function';
}
| |
| /** |
| * Checks if `value` is object-like. A value is object-like if it's not `null` |
| * and has a `typeof` result of "object". |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is object-like, else `false`. |
| * @example |
| * |
| * _.isObjectLike({}); |
| * // => true |
| * |
| * _.isObjectLike([1, 2, 3]); |
| * // => true |
| * |
| * _.isObjectLike(_.noop); |
| * // => false |
| * |
| * _.isObjectLike(null); |
| * // => false |
| */ |
function isObjectLike(value) {
  // Object-like: typeof 'object' but not null (functions are excluded).
  return typeof value == 'object' && value !== null;
}
| |
| /** |
| * Checks if `value` is classified as a `Symbol` primitive or object. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to check. |
| * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. |
| * @example |
| * |
| * _.isSymbol(Symbol.iterator); |
| * // => true |
| * |
| * _.isSymbol('abc'); |
| * // => false |
| */ |
function isSymbol(value) {
  // Primitive symbols first; boxed Symbol objects need the brand check.
  if (typeof value == 'symbol') {
    return true;
  }
  return isObjectLike(value) && objectToString.call(value) == symbolTag;
}
| |
| /** |
| * Converts `value` to a string. An empty string is returned for `null` |
| * and `undefined` values. The sign of `-0` is preserved. |
| * |
| * @static |
| * @memberOf _ |
| * @since 4.0.0 |
| * @category Lang |
| * @param {*} value The value to process. |
| * @returns {string} Returns the string. |
| * @example |
| * |
| * _.toString(null); |
| * // => '' |
| * |
| * _.toString(-0); |
| * // => '-0' |
| * |
| * _.toString([1, 2, 3]); |
| * // => '1,2,3' |
| */ |
function toString(value) {
  // `null`/`undefined` become '' rather than 'null'/'undefined'.
  return value == null ? '' : baseToString(value);
}
| |
| /** |
| * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, |
| * it's created. Arrays are created for missing index properties while objects |
| * are created for all other missing properties. Use `_.setWith` to customize |
| * `path` creation. |
| * |
| * **Note:** This method mutates `object`. |
| * |
| * @static |
| * @memberOf _ |
| * @since 3.7.0 |
| * @category Object |
| * @param {Object} object The object to modify. |
| * @param {Array|string} path The path of the property to set. |
| * @param {*} value The value to set. |
| * @returns {Object} Returns `object`. |
| * @example |
| * |
| * var object = { 'a': [{ 'b': { 'c': 3 } }] }; |
| * |
| * _.set(object, 'a[0].b.c', 4); |
| * console.log(object.a[0].b.c); |
| * // => 4 |
| * |
| * _.set(object, ['x', '0', 'y', 'z'], 5); |
| * console.log(object.x[0].y.z); |
| * // => 5 |
| */ |
function set(object, path, value) {
  // `null`/`undefined` targets are returned untouched (nothing to set on).
  return object == null ? object : baseSet(object, path, value);
}

module.exports = set;
| |
| |
| /***/ }), |
| |
| /***/ 898: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
Object.defineProperty(exports, '__esModule', { value: true });

var request = __webpack_require__(753);
var universalUserAgent = __webpack_require__(796);

// Package version constant (embedded by the build).
const VERSION = "4.5.0";
| |
/**
 * Error raised when a GraphQL response contains an `errors` array.
 * The first error's message becomes the Error message, and all fields of
 * `response.data` are copied onto the instance.
 */
class GraphqlError extends Error {
  constructor(request, response) {
    // Surface the first GraphQL error as the Error message.
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    this.name = "GraphqlError";
    this.request = request;

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
| |
// Request-transport option names; every other option key is treated as a
// GraphQL variable by `graphql` below.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
function graphql(request, query, options) {
  // A string first argument is the query text itself; otherwise the whole
  // object is the options bag (already containing `query`).
  if (typeof query === "string") {
    options = Object.assign({ query }, options);
  } else {
    options = query;
  }

  // Partition options: transport options stay top-level, everything else
  // becomes a GraphQL variable.
  const requestOptions = {};
  for (const key of Object.keys(options)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = options[key];
    } else {
      if (!requestOptions.variables) {
        requestOptions.variables = {};
      }
      requestOptions.variables[key] = options[key];
    }
  }

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      throw new GraphqlError(requestOptions, {
        data: response.data
      });
    }
    return response.data.data;
  });
}
| |
function withDefaults(request$1, newDefaults) {
  // Bind a request instance with the merged defaults, then expose a
  // callable graphql API carrying `defaults` and `endpoint` helpers.
  const newRequest = request$1.defaults(newDefaults);
  const newApi = (query, options) => graphql(newRequest, query, options);

  newApi.defaults = withDefaults.bind(null, newRequest);
  newApi.endpoint = request.request.endpoint;
  return newApi;
}
| |
// Default graphql instance: octokit's request bound to the /graphql POST
// endpoint with the standard user-agent header.
const graphql$1 = withDefaults(request.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});
function withCustomRequest(customRequest) {
  // Same /graphql POST defaults as the bundled instance, but on top of a
  // caller-supplied request implementation.
  const graphqlDefaults = {
    method: "POST",
    url: "/graphql"
  };
  return withDefaults(customRequest, graphqlDefaults);
}
| |
// Public API of this module.
exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 910: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| // JS-YAML's default schema for `load` function. |
| // It is not described in the YAML specification. |
| // |
| // This schema is based on JS-YAML's default safe schema and includes |
| // JavaScript-specific types: !!js/undefined, !!js/regexp and !!js/function. |
| // |
| // Also this schema is used as default base schema at `Schema.create` function. |
| |
| |
| |
| |
| |
| var Schema = __webpack_require__(43); |
| |
| |
// Default schema: the included base schema extended with the explicit
// JS-specific types listed in the header comment of this module.
module.exports = Schema.DEFAULT = new Schema({
  include: [
    __webpack_require__(723)
  ],
  explicit: [
    __webpack_require__(386),
    __webpack_require__(629),
    __webpack_require__(352)
  ]
});
| |
| |
| /***/ }), |
| |
| /***/ 913: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| /*eslint-disable no-bitwise*/ |
| |
// `Buffer` is optional: available in Node, absent in browsers (where the
// decoded binary stays a plain array of byte values).
var NodeBuffer;

try {
  // A trick for browserified version, to not include `Buffer` shim
  var _require = require;
  NodeBuffer = _require('buffer').Buffer;
} catch (__) {}

var Type = __webpack_require__(945);


// Base64 alphabet followed by padding and line-break characters.
// [ 64, 65, 66 ] -> [ padding, CR, LF ]
var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r';
| |
| |
function resolveYamlBinary(data) {
  if (data === null) return false;

  var map = BASE64_MAP;
  var bitlen = 0;

  for (var idx = 0; idx < data.length; idx++) {
    var code = map.indexOf(data.charAt(idx));

    // CR/LF (map indexes 65/66) are permitted and contribute no bits.
    if (code > 64) continue;

    // Any character outside the base64 alphabet makes the scalar invalid.
    if (code < 0) return false;

    bitlen += 6;
  }

  // A well-formed stream decodes to a whole number of bytes.
  return (bitlen % 8) === 0;
}
| |
// Decodes a base64 scalar into bytes (Buffer in Node, number[] in browsers).
function constructYamlBinary(data) {
  var idx, tailbits,
      input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan
      max = input.length,
      map = BASE64_MAP,
      bits = 0,
      result = [];

  // Collect by 6*4 bits (3 bytes)

  for (idx = 0; idx < max; idx++) {
    // Flush the accumulator every 4 input characters (= 24 bits = 3 bytes).
    if ((idx % 4 === 0) && idx) {
      result.push((bits >> 16) & 0xFF);
      result.push((bits >> 8) & 0xFF);
      result.push(bits & 0xFF);
    }

    bits = (bits << 6) | map.indexOf(input.charAt(idx));
  }

  // Dump tail

  tailbits = (max % 4) * 6;

  if (tailbits === 0) {
    // Full final quantum: 3 bytes.
    result.push((bits >> 16) & 0xFF);
    result.push((bits >> 8) & 0xFF);
    result.push(bits & 0xFF);
  } else if (tailbits === 18) {
    // 3 leftover chars: 2 bytes.
    result.push((bits >> 10) & 0xFF);
    result.push((bits >> 2) & 0xFF);
  } else if (tailbits === 12) {
    // 2 leftover chars: 1 byte.
    result.push((bits >> 4) & 0xFF);
  }

  // Wrap into Buffer for NodeJS and leave Array for browser
  if (NodeBuffer) {
    // Support node 6.+ Buffer API when available
    return NodeBuffer.from ? NodeBuffer.from(result) : new NodeBuffer(result);
  }

  return result;
}
| |
// Encodes a byte sequence (Buffer or array-like of byte values) as base64.
function representYamlBinary(object /*, style*/) {
  var result = '', bits = 0, idx, tail,
      max = object.length,
      map = BASE64_MAP;

  // Convert every three bytes to 4 ASCII characters.

  for (idx = 0; idx < max; idx++) {
    // Flush the accumulator every 3 input bytes (= 24 bits = 4 chars).
    if ((idx % 3 === 0) && idx) {
      result += map[(bits >> 18) & 0x3F];
      result += map[(bits >> 12) & 0x3F];
      result += map[(bits >> 6) & 0x3F];
      result += map[bits & 0x3F];
    }

    bits = (bits << 8) + object[idx];
  }

  // Dump tail

  tail = max % 3;

  if (tail === 0) {
    result += map[(bits >> 18) & 0x3F];
    result += map[(bits >> 12) & 0x3F];
    result += map[(bits >> 6) & 0x3F];
    result += map[bits & 0x3F];
  } else if (tail === 2) {
    // 2 leftover bytes: 3 chars plus one '=' padding (map[64]).
    result += map[(bits >> 10) & 0x3F];
    result += map[(bits >> 4) & 0x3F];
    result += map[(bits << 2) & 0x3F];
    result += map[64];
  } else if (tail === 1) {
    // 1 leftover byte: 2 chars plus two '=' paddings.
    result += map[(bits >> 2) & 0x3F];
    result += map[(bits << 4) & 0x3F];
    result += map[64];
    result += map[64];
  }

  return result;
}
| |
// Only `Buffer` instances are dumped as !!binary (always false in browsers,
// where NodeBuffer is undefined).
function isBinary(object) {
  return NodeBuffer && NodeBuffer.isBuffer(object);
}
| |
module.exports = new Type('tag:yaml.org,2002:binary', {
  kind: 'scalar',
  resolve: resolveYamlBinary,      // validate base64 text
  construct: constructYamlBinary,  // base64 text -> bytes
  predicate: isBinary,             // dump Buffers as !!binary
  represent: representYamlBinary   // bytes -> base64 text
});
| |
| |
| /***/ }), |
| |
| /***/ 916: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
Object.defineProperty(exports, '__esModule', { value: true });

// Package version constant (embedded by the build).
const VERSION = "1.0.0";
| |
| /** |
| * @param octokit Octokit instance |
| * @param options Options passed to Octokit constructor |
| */ |
| |
function requestLog(octokit) {
  octokit.hook.wrap("request", (request, options) => {
    octokit.log.debug("request", options);

    const start = Date.now();
    const requestOptions = octokit.request.endpoint.parse(options);
    const path = requestOptions.url.replace(options.baseUrl, "");

    // Log "METHOD /path - STATUS in Nms" for both outcomes.
    const logOutcome = (status) => {
      octokit.log.info(`${requestOptions.method} ${path} - ${status} in ${Date.now() - start}ms`);
    };

    return request(options)
      .then(response => {
        logOutcome(response.status);
        return response;
      })
      .catch(error => {
        logOutcome(error.status);
        throw error;
      });
  });
}
requestLog.VERSION = VERSION; // expose the plugin version on the function itself

exports.requestLog = requestLog;
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 921: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
var Type = __webpack_require__(945);

// !!seq: standard YAML sequence; `construct` normalizes an empty (null)
// node into an empty array.
module.exports = new Type('tag:yaml.org,2002:seq', {
  kind: 'sequence',
  construct: function (data) { return data !== null ? data : []; }
});
| |
| |
| /***/ }), |
| |
| /***/ 929: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
module.exports = hasNextPage

const deprecate = __webpack_require__(370)
const getPageLinks = __webpack_require__(577)

// Deprecated pagination helper: returns the `next` URL parsed from a
// response's Link header (undefined when there is no next page), and emits
// a deprecation warning on every call.
function hasNextPage (link) {
  deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`)
  return getPageLinks(link).next
}
| |
| |
| /***/ }), |
| |
| /***/ 945: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
var YAMLException = __webpack_require__(556);

// The only option names `Type` accepts; any other name is rejected with a
// YAMLException in the constructor below.
var TYPE_CONSTRUCTOR_OPTIONS = [
  'kind',
  'resolve',
  'construct',
  'instanceOf',
  'predicate',
  'represent',
  'defaultStyle',
  'styleAliases'
];

// Every type must declare exactly one of these node kinds.
var YAML_NODE_KINDS = [
  'scalar',
  'sequence',
  'mapping'
];
| |
// Inverts { style: [alias, ...] } into { alias: style }; aliases are
// coerced to strings. A null map yields an empty result.
function compileStyleAliases(map) {
  var result = {};

  if (map !== null) {
    var styles = Object.keys(map);
    for (var i = 0; i < styles.length; i++) {
      var style = styles[i];
      var aliases = map[style];
      for (var j = 0; j < aliases.length; j++) {
        result[String(aliases[j])] = style;
      }
    }
  }

  return result;
}
| |
function Type(tag, options) {
  options = options || {};

  // Reject unknown option names early with a descriptive error.
  var optionNames = Object.keys(options);
  for (var i = 0; i < optionNames.length; i++) {
    var name = optionNames[i];
    if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) {
      throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.');
    }
  }

  // TODO: Add tag format check.
  this.tag = tag;
  this.kind = options['kind'] || null;
  this.resolve = options['resolve'] || function () { return true; };
  this.construct = options['construct'] || function (data) { return data; };
  this.instanceOf = options['instanceOf'] || null;
  this.predicate = options['predicate'] || null;
  this.represent = options['represent'] || null;
  this.defaultStyle = options['defaultStyle'] || null;
  this.styleAliases = compileStyleAliases(options['styleAliases'] || null);

  if (YAML_NODE_KINDS.indexOf(this.kind) === -1) {
    throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.');
  }
}
| |
module.exports = Type; // base class for all YAML type definitions in this bundle
| |
| |
| /***/ }), |
| |
| /***/ 947: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
var Type = __webpack_require__(945);

// Cached for repeated `[object Object]` brand checks below.
var _toString = Object.prototype.toString;
| |
// Validates a !!pairs node: the input must be a sequence whose entries are
// each a single-key mapping. Returns true for a null (empty) node.
//
// Fix: the original allocated and filled a `result` array that was never
// used — this function only returns a boolean (the tuple-building copy of
// the loop lives in constructYamlPairs). The dead allocation is removed.
function resolveYamlPairs(data) {
  if (data === null) return true;

  for (var index = 0, length = data.length; index < length; index += 1) {
    var pair = data[index];

    // Each entry must be a plain object...
    if (Object.prototype.toString.call(pair) !== '[object Object]') return false;

    // ...with exactly one key.
    if (Object.keys(pair).length !== 1) return false;
  }

  return true;
}
| |
// Converts a sequence of single-key mappings into an array of
// [key, value] tuples, preserving order.
function constructYamlPairs(data) {
  if (data === null) return [];

  var result = [];

  for (var index = 0; index < data.length; index += 1) {
    var pair = data[index];
    var key = Object.keys(pair)[0];
    result.push([key, pair[key]]);
  }

  return result;
}
| |
// !!pairs: an ordered sequence of single-key mappings, constructed as an
// array of [key, value] tuples (duplicate keys allowed, order preserved).
module.exports = new Type('tag:yaml.org,2002:pairs', {
  kind: 'sequence',
  resolve: resolveYamlPairs,
  construct: constructYamlPairs
});
| |
| |
| /***/ }), |
| |
| /***/ 948: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| /** |
| * Tries to execute a function and discards any error that occurs. |
| * @param {Function} fn - Function that might or might not throw an error. |
| * @returns {?*} Return-value of the function when no error occurred. |
| */ |
module.exports = function(fn) {

  // Deliberate best-effort: any thrown error is discarded and the call
  // yields `undefined` (see the JSDoc above).
  try { return fn() } catch (e) {}

}
| |
| /***/ }), |
| |
| /***/ 950: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
Object.defineProperty(exports, "__esModule", { value: true });
const url = __webpack_require__(835); // bundler id — presumably Node's `url` core module (only `url.parse` is used); confirm against the module map
// Resolve the proxy URL (parsed via the legacy `url` API) that should be
// used for `reqUrl`, or undefined when no proxy applies or the host is in
// the NO_PROXY bypass list.
function getProxyUrl(reqUrl) {
    // Bypassed hosts get no proxy at all.
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    // Pick the env variable matching the request scheme; the lowercase
    // form takes precedence, mirroring common CLI tooling.
    const proxyVar = reqUrl.protocol === 'https:'
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    return proxyVar ? url.parse(proxyVar) : undefined;
}
| exports.getProxyUrl = getProxyUrl; |
// Return true when `reqUrl`'s host appears in the no_proxy/NO_PROXY
// environment list and proxying should therefore be skipped.
function checkBypass(reqUrl) {
    // Without a hostname there is nothing to match against.
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Resolve the effective port: explicit port wins, otherwise the
    // scheme default (80/443). Unknown schemes leave it undefined.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate forms to match: bare host and host:port, upper-cased so
    // the comparison is case-insensitive.
    const candidates = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        candidates.push(`${candidates[0]}:${reqPort}`);
    }
    // NO_PROXY is a comma-separated list; only exact host(:port) entries
    // match — no wildcard or domain-suffix matching here.
    return noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry)
        .some(entry => candidates.includes(entry));
}
| exports.checkBypass = checkBypass; |
| |
| |
| /***/ }), |
| |
| /***/ 954: |
| /***/ (function(module) { |
| |
| module.exports = validateAuth; |
| |
// Validate the octokit "auth" option. Accepts: a token string, a custom
// auth function, {username, password}, or {clientId, clientSecret}.
// Throws a descriptive Error for anything else.
function validateAuth(auth) {
  // Guard null/undefined up front: previously the property accesses below
  // threw a cryptic TypeError instead of the intended descriptive Error.
  if (auth == null) {
    throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`);
  }

  // Plain token.
  if (typeof auth === "string") {
    return;
  }

  // Custom authentication strategy.
  if (typeof auth === "function") {
    return;
  }

  // Basic authentication.
  if (auth.username && auth.password) {
    return;
  }

  // OAuth app authentication.
  if (auth.clientId && auth.clientSecret) {
    return;
  }

  throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`);
}
| |
| |
| /***/ }), |
| |
| /***/ 955: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const path = __webpack_require__(622); |
| const childProcess = __webpack_require__(129); |
| const crossSpawn = __webpack_require__(20); |
| const stripEof = __webpack_require__(768); |
| const npmRunPath = __webpack_require__(621); |
| const isStream = __webpack_require__(323); |
| const _getStream = __webpack_require__(145); |
| const pFinally = __webpack_require__(697); |
| const onExit = __webpack_require__(260); |
| const errname = __webpack_require__(427); |
| const stdio = __webpack_require__(168); |
| |
| const TEN_MEGABYTES = 1000 * 1000 * 10; |
| |
// Normalise (cmd, args, opts) into the final spawn arguments. The option
// layering is order-sensitive: caller opts are merged over the library
// defaults, and cross-spawn may rewrite cmd/args for Windows shims.
function handleArgs(cmd, args, opts) {
	let parsed;

	// Merge the caller's env over process.env unless extendEnv is disabled.
	opts = Object.assign({
		extendEnv: true,
		env: {}
	}, opts);

	if (opts.extendEnv) {
		opts.env = Object.assign({}, process.env, opts.env);
	}

	if (opts.__winShell === true) {
		// Internal flag set by handleShell() on Windows: bypass cross-spawn
		// parsing and run the command through the shell verbatim.
		delete opts.__winShell;
		parsed = {
			command: cmd,
			args,
			options: opts,
			file: cmd,
			original: {
				cmd,
				args
			}
		};
	} else {
		parsed = crossSpawn._parse(cmd, args, opts);
	}

	// Library defaults; anything in parsed.options (the caller opts) wins.
	opts = Object.assign({
		maxBuffer: TEN_MEGABYTES,
		buffer: true,
		stripEof: true,
		preferLocal: true,
		localDir: parsed.options.cwd || process.cwd(),
		encoding: 'utf8',
		reject: true,
		cleanup: true
	}, parsed.options);

	opts.stdio = stdio(opts);

	if (opts.preferLocal) {
		// Prepend locally installed binaries (node_modules/.bin) to PATH.
		opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
	}

	if (opts.detached) {
		// #115 — a detached child must outlive the parent, so never auto-kill it.
		opts.cleanup = false;
	}

	if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
		// #116 — /q turns off cmd.exe command echoing.
		parsed.args.unshift('/q');
	}

	return {
		cmd: parsed.command,
		args: parsed.args,
		opts,
		parsed
	};
}
| |
// Feed the `input` option into the child's stdin: pipe a stream through,
// or write-and-close for a string/buffer. No-op when no input was given.
function handleInput(spawned, input) {
	if (input === null || input === undefined) {
		return;
	}

	if (isStream(input)) {
		input.pipe(spawned.stdin);
		return;
	}

	spawned.stdin.end(input);
}
| |
// Post-process captured output: strip the trailing EOF/newline when the
// stripEof option is on; pass falsy values through untouched.
function handleOutput(opts, val) {
	return val && opts.stripEof ? stripEof(val) : val;
}
| |
// Run `cmd` through a system shell by delegating to `fn` (the async or
// sync entry point) with shell-appropriate file/args.
function handleShell(fn, cmd, opts) {
	const shellOpts = Object.assign({}, opts);
	let file;
	let args;

	if (process.platform === 'win32') {
		// cmd.exe needs verbatim arguments and the whole command quoted;
		// __winShell tells handleArgs to skip cross-spawn parsing.
		shellOpts.__winShell = true;
		file = process.env.comspec || 'cmd.exe';
		args = ['/s', '/c', `"${cmd}"`];
		shellOpts.windowsVerbatimArguments = true;
	} else {
		file = '/bin/sh';
		args = ['-c', cmd];
	}

	if (shellOpts.shell) {
		// An explicit `shell` option overrides the platform default.
		file = shellOpts.shell;
		delete shellOpts.shell;
	}

	return fn(file, args, shellOpts);
}
| |
// Collect one of the child's stdio streams according to the
// encoding/buffer options. Returns null when the stream does not exist;
// otherwise a promise for the captured output (or for bare completion
// when buffering is disabled). Failures are tagged with the stream name.
function getStream(proc, stream, {encoding, buffer, maxBuffer}) {
	const source = proc[stream];

	if (!source) {
		return null;
	}

	let ret;

	if (!buffer) {
		// Not capturing: just wait for the stream to end.
		// TODO: Use `ret = util.promisify(stream.finished)(proc[stream]);` when targeting Node.js 10
		ret = new Promise((resolve, reject) => {
			source.once('end', resolve).once('error', reject);
		});
	} else if (encoding) {
		ret = _getStream(source, {encoding, maxBuffer});
	} else {
		ret = _getStream.buffer(source, {maxBuffer});
	}

	return ret.catch(err => {
		// Prefix errors with the stream name for better messages upstream.
		err.stream = stream;
		err.message = `${stream} ${err.message}`;
		throw err;
	});
}
| |
// Build (or enrich) the Error describing a failed child process.
// `result` carries stdout/stderr plus either a spawn `error` or the exit
// code/signal; `options` carries the parsed opts and the joined command.
function makeError(result, options) {
	const {stdout, stderr} = result;

	let err = result.error;
	const {code, signal} = result;

	const {parsed, joinedCmd} = options;
	const timedOut = options.timedOut || false;

	if (!err) {
		// Only include output that was not already streamed straight to the
		// parent via 'inherit'. (The original guarded the stderr append with
		// `output.length > 0 ? stderr : ...` — but output is always empty at
		// that point, so the expression reduced to the plain append below.)
		let output = '';

		if (Array.isArray(parsed.opts.stdio)) {
			if (parsed.opts.stdio[2] !== 'inherit') {
				output += `\n${stderr}`;
			}

			if (parsed.opts.stdio[1] !== 'inherit') {
				output += `\n${stdout}`;
			}
		} else if (parsed.opts.stdio !== 'inherit') {
			output = `\n${stderr}${stdout}`;
		}

		err = new Error(`Command failed: ${joinedCmd}${output}`);
		// Negative codes are errno values; translate them to names.
		err.code = code < 0 ? errname(code) : code;
	}

	err.stdout = stdout;
	err.stderr = stderr;
	err.failed = true;
	err.signal = signal || null;
	err.cmd = joinedCmd;
	err.timedOut = timedOut;

	return err;
}
| |
// Join the command and its arguments into a single display string for
// error messages and the `cmd` result field.
function joinCmd(cmd, args) {
	if (!Array.isArray(args) || args.length === 0) {
		return cmd;
	}

	return `${cmd} ${args.join(' ')}`;
}
| |
// Main entry point: spawn `cmd` and return the ChildProcess, augmented
// with then/catch so it can also be awaited as a promise resolving to
// {stdout, stderr, code, failed, killed, signal, cmd, timedOut}.
module.exports = (cmd, args, opts) => {
	const parsed = handleArgs(cmd, args, opts);
	const {encoding, buffer, maxBuffer} = parsed.opts;
	const joinedCmd = joinCmd(cmd, args);

	let spawned;
	try {
		spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
	} catch (err) {
		// Surface synchronous spawn failures as a rejection, matching the
		// async error path.
		return Promise.reject(err);
	}

	let removeExitHandler;
	if (parsed.opts.cleanup) {
		// Kill the child if the parent process exits first.
		removeExitHandler = onExit(() => {
			spawned.kill();
		});
	}

	let timeoutId = null;
	let timedOut = false;

	// Clear the timeout and the exit-hook once the child has settled.
	const cleanup = () => {
		if (timeoutId) {
			clearTimeout(timeoutId);
			timeoutId = null;
		}

		if (removeExitHandler) {
			removeExitHandler();
		}
	};

	if (parsed.opts.timeout > 0) {
		// Kill the child with opts.killSignal once the timeout elapses.
		timeoutId = setTimeout(() => {
			timeoutId = null;
			timedOut = true;
			spawned.kill(parsed.opts.killSignal);
		}, parsed.opts.timeout);
	}

	// Resolves (never rejects) with {code, signal} or {error} once the
	// child terminates or fails to start.
	const processDone = new Promise(resolve => {
		spawned.on('exit', (code, signal) => {
			cleanup();
			resolve({code, signal});
		});

		spawned.on('error', err => {
			cleanup();
			resolve({error: err});
		});

		if (spawned.stdin) {
			spawned.stdin.on('error', err => {
				cleanup();
				resolve({error: err});
			});
		}
	});

	// Tear down the stdio streams when the result promise settles.
	function destroy() {
		if (spawned.stdout) {
			spawned.stdout.destroy();
		}

		if (spawned.stderr) {
			spawned.stderr.destroy();
		}
	}

	// Wait for process exit AND both captured streams, then shape the result.
	const handlePromise = () => pFinally(Promise.all([
		processDone,
		getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
		getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
	]).then(arr => {
		const result = arr[0];
		result.stdout = arr[1];
		result.stderr = arr[2];

		if (result.error || result.code !== 0 || result.signal !== null) {
			const err = makeError(result, {
				joinedCmd,
				parsed,
				timedOut
			});

			// TODO: missing some timeout logic for killed
			// https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
			// err.killed = spawned.killed || killed;
			err.killed = err.killed || spawned.killed;

			// With reject:false the error object is returned, not thrown.
			if (!parsed.opts.reject) {
				return err;
			}

			throw err;
		}

		return {
			stdout: handleOutput(parsed.opts, result.stdout),
			stderr: handleOutput(parsed.opts, result.stderr),
			code: 0,
			failed: false,
			killed: false,
			signal: null,
			cmd: joinedCmd,
			timedOut: false
		};
	}), destroy);

	crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);

	handleInput(spawned, parsed.opts.input);

	// Make the ChildProcess awaitable without eagerly consuming the streams:
	// the promise machinery only starts when then/catch is first invoked.
	spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
	spawned.catch = onrejected => handlePromise().catch(onrejected);

	return spawned;
};
| |
// Convenience variant resolving directly to the stdout text.
// TODO: set `stderr: 'ignore'` when that option is implemented
module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout);

// Convenience variant resolving directly to the stderr text.
// TODO: set `stdout: 'ignore'` when that option is implemented
module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr);

// Run a whole command line through the system shell (see handleShell).
module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts);
| |
// Synchronous variant built on child_process.spawnSync. Mirrors the async
// result shape; a streaming `input` is impossible synchronously.
module.exports.sync = (cmd, args, opts) => {
	const parsed = handleArgs(cmd, args, opts);
	const joinedCmd = joinCmd(cmd, args);

	if (isStream(parsed.opts.input)) {
		throw new TypeError('The `input` option cannot be a stream in sync mode');
	}

	const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
	// Normalise spawnSync's `status` field to the async API's `code`.
	result.code = result.status;

	if (result.error || result.status !== 0 || result.signal !== null) {
		const err = makeError(result, {
			joinedCmd,
			parsed
		});

		// With reject:false the error object is returned, not thrown.
		if (!parsed.opts.reject) {
			return err;
		}

		throw err;
	}

	return {
		stdout: handleOutput(parsed.opts, result.stdout),
		stderr: handleOutput(parsed.opts, result.stderr),
		code: 0,
		failed: false,
		signal: null,
		cmd: joinedCmd,
		timedOut: false
	};
};

// Synchronous shell variant (see handleShell).
module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
| |
| |
| /***/ }), |
| |
| /***/ 966: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const {PassThrough} = __webpack_require__(413); |
| |
| module.exports = options => { |
| options = Object.assign({}, options); |
| |
| const {array} = options; |
| let {encoding} = options; |
| const buffer = encoding === 'buffer'; |
| let objectMode = false; |
| |
| if (array) { |
| objectMode = !(encoding || buffer); |
| } else { |
| encoding = encoding || 'utf8'; |
| } |
| |
| if (buffer) { |
| encoding = null; |
| } |
| |
| let len = 0; |
| const ret = []; |
| const stream = new PassThrough({objectMode}); |
| |
| if (encoding) { |
| stream.setEncoding(encoding); |
| } |
| |
| stream.on('data', chunk => { |
| ret.push(chunk); |
| |
| if (objectMode) { |
| len = ret.length; |
| } else { |
| len += chunk.length; |
| } |
| }); |
| |
| stream.getBufferedValue = () => { |
| if (array) { |
| return ret; |
| } |
| |
| return buffer ? Buffer.concat(ret, len) : ret.join(''); |
| }; |
| |
| stream.getBufferedLength = () => len; |
| |
| return stream; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 969: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
var wrappy = __webpack_require__(11)
// wrappy keeps the wrapped function's arity and copies over own properties.
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

// Opt-in prototype extension: calling require('once').proto() adds
// Function.prototype.once / .onceStrict helpers. The installer itself is
// wrapped in once() so repeated calls only patch the prototype one time.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
| |
// Wrap `fn` so it runs at most once; subsequent calls return the cached
// first result. Exposes `called` and `value` on the wrapper.
function once (fn) {
  var wrapper = function () {
    if (!wrapper.called) {
      wrapper.called = true
      wrapper.value = fn.apply(this, arguments)
    }
    return wrapper.value
  }
  wrapper.called = false
  return wrapper
}
| |
// Like once(), but a second invocation throws (message based on fn.name)
// instead of silently returning the cached value.
function onceStrict (fn) {
  var name = fn.name || 'Function wrapped with `once`'
  var wrapper = function () {
    if (wrapper.called) {
      throw new Error(wrapper.onceError)
    }
    wrapper.called = true
    wrapper.value = fn.apply(this, arguments)
    return wrapper.value
  }
  wrapper.onceError = name + " shouldn't be called more than once"
  wrapper.called = false
  return wrapper
}
| |
| |
| /***/ }), |
| |
| /***/ 986: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript-emitted helper: drives a generator-based coroutine, adopting
// each yielded value into a promise of type P (defaults to Promise).
// Reuses an existing this.__awaiter when one is already defined.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in P unless it already is an instance of P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Pump the generator until done, chaining each yielded promise.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: wraps a CommonJS export so it can be consumed
// as an ES-module namespace — own enumerable properties are copied across
// and the original module is exposed on `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const tr = __importStar(__webpack_require__(686)); |
| /** |
| * Exec a command. |
| * Output will be streamed to the live console. |
| * Returns promise with return code |
| * |
| * @param commandLine command to execute (can include additional args). Must be correctly escaped. |
| * @param args optional arguments for tool. Escaping is handled by the lib. |
| * @param options optional exec options. See ExecOptions |
| * @returns Promise<number> exit code |
| */ |
function exec(commandLine, args, options) {
    return __awaiter(this, void 0, void 0, function* () {
        // Split the command line; the first token is the tool itself.
        const commandArgs = tr.argStringToArray(commandLine);
        if (commandArgs.length === 0) {
            throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
        }
        const toolPath = commandArgs[0];
        // Remaining tokens plus any explicitly supplied args become the
        // tool's argument list.
        const toolArgs = commandArgs.slice(1).concat(args || []);
        const runner = new tr.ToolRunner(toolPath, toolArgs, options);
        return runner.exec();
    });
}
| exports.exec = exec; |
| //# sourceMappingURL=exec.js.map |
| |
| /***/ }), |
| |
| /***/ 988: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var Type = __webpack_require__(945); |
| |
// Register the standard YAML `!!map` type: a mapping node; an explicit
// null node constructs as an empty object.
module.exports = new Type('tag:yaml.org,2002:map', {
  kind: 'mapping',
  construct: function (data) { return data !== null ? data : {}; }
});
| |
| |
| /***/ }) |
| |
| /******/ }); |