// Webpack bundle bootstrap: a minimal CommonJS-style runtime. It keeps a
// cache of executed modules and immediately runs entry module 198 from the
// module map that follows this function.
module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ "use strict";
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ // NOTE: cached BEFORE execution so circular requires get the
/******/ // partially-built exports object instead of recursing forever.
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId, // module id
/******/ l: false, // "loaded" flag
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // Absolute base directory; compiled modules use this to locate
/******/ // files shipped next to the bundle.
/******/ __webpack_require__.ab = __dirname + "/";
/******/
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(198);
/******/ };
/******/
/******/ // run startup
/******/ return startup();
/******/ })
/************************************************************************/
/******/ ({
| |
| /***/ 2: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const os = __webpack_require__(87); |
| const macosRelease = __webpack_require__(118); |
| const winRelease = __webpack_require__(49); |
| |
| const osName = (platform, release) => { |
| if (!platform && release) { |
| throw new Error('You can\'t specify a `release` without specifying `platform`'); |
| } |
| |
| platform = platform || os.platform(); |
| |
| let id; |
| |
| if (platform === 'darwin') { |
| if (!release && os.platform() === 'darwin') { |
| release = os.release(); |
| } |
| |
| const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; |
| id = release ? macosRelease(release).name : ''; |
| return prefix + (id ? ' ' + id : ''); |
| } |
| |
| if (platform === 'linux') { |
| if (!release && os.platform() === 'linux') { |
| release = os.release(); |
| } |
| |
| id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; |
| return 'Linux' + (id ? ' ' + id : ''); |
| } |
| |
| if (platform === 'win32') { |
| if (!release && os.platform() === 'win32') { |
| release = os.release(); |
| } |
| |
| id = release ? winRelease(release) : ''; |
| return 'Windows' + (id ? ' ' + id : ''); |
| } |
| |
| return platform; |
| }; |
| |
| module.exports = osName; |
| |
| |
| /***/ }), |
| |
| /***/ 9: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var once = __webpack_require__(969); |
| |
| var noop = function() {}; |
| |
| var isRequest = function(stream) { |
| return stream.setHeader && typeof stream.abort === 'function'; |
| }; |
| |
| var isChildProcess = function(stream) { |
| return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 |
| }; |
| |
| var eos = function(stream, opts, callback) { |
| if (typeof opts === 'function') return eos(stream, null, opts); |
| if (!opts) opts = {}; |
| |
| callback = once(callback || noop); |
| |
| var ws = stream._writableState; |
| var rs = stream._readableState; |
| var readable = opts.readable || (opts.readable !== false && stream.readable); |
| var writable = opts.writable || (opts.writable !== false && stream.writable); |
| var cancelled = false; |
| |
| var onlegacyfinish = function() { |
| if (!stream.writable) onfinish(); |
| }; |
| |
| var onfinish = function() { |
| writable = false; |
| if (!readable) callback.call(stream); |
| }; |
| |
| var onend = function() { |
| readable = false; |
| if (!writable) callback.call(stream); |
| }; |
| |
| var onexit = function(exitCode) { |
| callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); |
| }; |
| |
| var onerror = function(err) { |
| callback.call(stream, err); |
| }; |
| |
| var onclose = function() { |
| process.nextTick(onclosenexttick); |
| }; |
| |
| var onclosenexttick = function() { |
| if (cancelled) return; |
| if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); |
| if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); |
| }; |
| |
| var onrequest = function() { |
| stream.req.on('finish', onfinish); |
| }; |
| |
| if (isRequest(stream)) { |
| stream.on('complete', onfinish); |
| stream.on('abort', onclose); |
| if (stream.req) onrequest(); |
| else stream.on('request', onrequest); |
| } else if (writable && !ws) { // legacy streams |
| stream.on('end', onlegacyfinish); |
| stream.on('close', onlegacyfinish); |
| } |
| |
| if (isChildProcess(stream)) stream.on('exit', onexit); |
| |
| stream.on('end', onend); |
| stream.on('finish', onfinish); |
| if (opts.error !== false) stream.on('error', onerror); |
| stream.on('close', onclose); |
| |
| return function() { |
| cancelled = true; |
| stream.removeListener('complete', onfinish); |
| stream.removeListener('abort', onclose); |
| stream.removeListener('request', onrequest); |
| if (stream.req) stream.req.removeListener('finish', onfinish); |
| stream.removeListener('end', onlegacyfinish); |
| stream.removeListener('close', onlegacyfinish); |
| stream.removeListener('finish', onfinish); |
| stream.removeListener('exit', onexit); |
| stream.removeListener('end', onend); |
| stream.removeListener('error', onerror); |
| stream.removeListener('close', onclose); |
| }; |
| }; |
| |
| module.exports = eos; |
| |
| |
| /***/ }), |
| |
| /***/ 11: |
| /***/ (function(module) { |
| |
| // Returns a wrapper function that returns a wrapped callback |
| // The wrapper function should do some stuff, and return a |
| // presumably different callback function. |
| // This makes sure that own properties are retained, so that |
| // decorations and such are not lost along the way. |
| module.exports = wrappy |
// Wrap `fn` so that own properties of both the wrapper factory and the
// callback it decorates are preserved. Calling wrappy(fn, cb) is shorthand
// for wrappy(fn)(cb).
function wrappy (fn, cb) {
  // Two-argument form: build the wrapper, then apply it immediately.
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // Carry the factory's own decorations over to the wrapper.
  for (const key of Object.keys(fn)) {
    wrapper[key] = fn[key]
  }

  return wrapper

  function wrapper (...args) {
    const ret = fn.apply(this, args)
    // By convention the callback is the final argument; if fn produced a
    // different function, copy the callback's decorations onto it.
    const cb = args[args.length - 1]
    if (typeof ret === 'function' && ret !== cb) {
      for (const key of Object.keys(cb)) {
        ret[key] = cb[key]
      }
    }
    return ret
  }
}
| |
| |
| /***/ }), |
| |
| /***/ 16: |
| /***/ (function(module) { |
| |
| module.exports = require("tls"); |
| |
| /***/ }), |
| |
| /***/ 18: |
| /***/ (function() { |
| |
| eval("require")("encoding"); |
| |
| |
| /***/ }), |
| |
| /***/ 20: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const cp = __webpack_require__(129); |
| const parse = __webpack_require__(568); |
| const enoent = __webpack_require__(881); |
| |
| function spawn(command, args, options) { |
| // Parse the arguments |
| const parsed = parse(command, args, options); |
| |
| // Spawn the child process |
| const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); |
| |
| // Hook into child process "exit" event to emit an error if the command |
| // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 |
| enoent.hookChildProcess(spawned, parsed); |
| |
| return spawned; |
| } |
| |
| function spawnSync(command, args, options) { |
| // Parse the arguments |
| const parsed = parse(command, args, options); |
| |
| // Spawn the child process |
| const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); |
| |
| // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 |
| result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); |
| |
| return result; |
| } |
| |
| module.exports = spawn; |
| module.exports.spawn = spawn; |
| module.exports.sync = spawnSync; |
| |
| module.exports._parse = parse; |
| module.exports._enoent = enoent; |
| |
| |
| /***/ }), |
| |
| /***/ 39: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| module.exports = opts => { |
| opts = opts || {}; |
| |
| const env = opts.env || process.env; |
| const platform = opts.platform || process.platform; |
| |
| if (platform !== 'win32') { |
| return 'PATH'; |
| } |
| |
| return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 48: |
| /***/ (function(module, exports) { |
| |
| exports = module.exports = SemVer |
| |
| var debug |
| /* istanbul ignore next */ |
| if (typeof process === 'object' && |
| process.env && |
| process.env.NODE_DEBUG && |
| /\bsemver\b/i.test(process.env.NODE_DEBUG)) { |
| debug = function () { |
| var args = Array.prototype.slice.call(arguments, 0) |
| args.unshift('SEMVER') |
| console.log.apply(console, args) |
| } |
| } else { |
| debug = function () {} |
| } |
| |
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'

var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re
// Registry pattern: src[i] holds each pattern's source string, re[i] the
// compiled RegExp, and R hands out the next free index — so the order of
// the `R++` declarations below is load-bearing and must not change.
var re = exports.re = []
var src = exports.src = []
var R = 0

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

var NUMERICIDENTIFIER = R++
src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
var NUMERICIDENTIFIERLOOSE = R++
src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

var NONNUMERICIDENTIFIER = R++
src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.

var MAINVERSION = R++
src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')'

var MAINVERSIONLOOSE = R++
src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

var PRERELEASEIDENTIFIER = R++
src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
                            '|' + src[NONNUMERICIDENTIFIER] + ')'

var PRERELEASEIDENTIFIERLOOSE = R++
src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
                                 '|' + src[NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

var PRERELEASE = R++
src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
                  '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'

var PRERELEASELOOSE = R++
src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
                       '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

var BUILDIDENTIFIER = R++
src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

var BUILD = R++
src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
             '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

var FULL = R++
var FULLPLAIN = 'v?' + src[MAINVERSION] +
                src[PRERELEASE] + '?' +
                src[BUILD] + '?'

src[FULL] = '^' + FULLPLAIN + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
                 src[PRERELEASELOOSE] + '?' +
                 src[BUILD] + '?'

var LOOSE = R++
src[LOOSE] = '^' + LOOSEPLAIN + '$'

var GTLT = R++
src[GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
var XRANGEIDENTIFIERLOOSE = R++
src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
var XRANGEIDENTIFIER = R++
src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'

var XRANGEPLAIN = R++
src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:' + src[PRERELEASE] + ')?' +
                   src[BUILD] + '?' +
                   ')?)?'

var XRANGEPLAINLOOSE = R++
src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:' + src[PRERELEASELOOSE] + ')?' +
                        src[BUILD] + '?' +
                        ')?)?'

var XRANGE = R++
src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
var XRANGELOOSE = R++
src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
var COERCE = R++
src[COERCE] = '(?:^|[^\\d])' +
              '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:$|[^\\d])'

// Tilde ranges.
// Meaning is "reasonably at or greater than"
var LONETILDE = R++
src[LONETILDE] = '(?:~>?)'

var TILDETRIM = R++
src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

var TILDE = R++
src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
var TILDELOOSE = R++
src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
var LONECARET = R++
src[LONECARET] = '(?:\\^)'

var CARETTRIM = R++
src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
var caretTrimReplace = '$1^'

var CARET = R++
src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
var CARETLOOSE = R++
src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
var COMPARATORLOOSE = R++
src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
var COMPARATOR = R++
src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
var COMPARATORTRIM = R++
src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
                      '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
var HYPHENRANGE = R++
src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
                   '\\s+-\\s+' +
                   '(' + src[XRANGEPLAIN] + ')' +
                   '\\s*$'

var HYPHENRANGELOOSE = R++
src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s+-\\s+' +
                        '(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s*$'

// Star ranges basically just allow anything at all.
var STAR = R++
src[STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag
// (the trim patterns already compiled with /g are left untouched).
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
| |
exports.parse = parse
// Parse `version` into a SemVer instance, or return null when it is not a
// valid version (wrong type, too long, or not matching the grammar).
// `options` may be the legacy boolean `loose` flag.
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    options = { loose: !!options, includePrerelease: false }
  }

  // Already parsed: hand it straight back.
  if (version instanceof SemVer) return version

  if (typeof version !== 'string') return null
  if (version.length > MAX_LENGTH) return null

  var pattern = options.loose ? re[LOOSE] : re[FULL]
  if (!pattern.test(version)) return null

  try {
    return new SemVer(version, options)
  } catch (er) {
    // Constructor validation failures map to the same "not valid" result.
    return null
  }
}

exports.valid = valid
// Normalized version string for `version`, or null if unparseable.
function valid (version, options) {
  var parsed = parse(version, options)
  return parsed ? parsed.version : null
}

exports.clean = clean
// Like valid(), but first strips surrounding whitespace and any leading
// '=' / 'v' characters.
function clean (version, options) {
  var parsed = parse(version.trim().replace(/^[=v]+/, ''), options)
  return parsed ? parsed.version : null
}
| |
exports.SemVer = SemVer

// Parsed semantic version. Callable with or without `new`. Passing an
// existing SemVer with the same `loose` flag returns that very instance;
// with a different flag it is re-parsed from its string form.
// Throws TypeError for non-strings, over-long input, or grammar failures.
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy calling convention: boolean `loose` in place of options.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      return version
    } else {
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  // Support plain function-call usage: SemVer(v) === new SemVer(v).
  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids so "2" sorts after "10" correctly
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  // Build metadata is kept for display but never used in comparisons.
  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
| |
// Rebuild `this.version` from the parsed parts (build metadata excluded)
// and return it.
SemVer.prototype.format = function () {
  this.version = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length) {
    this.version += '-' + this.prerelease.join('.')
  }
  return this.version
}

SemVer.prototype.toString = function () {
  return this.version
}

// Full three-way comparison: main version first, prerelease as tiebreaker.
// Returns <0, 0, or >0.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return this.compareMain(other) || this.comparePre(other)
}

// Compare only major.minor.patch.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return compareIdentifiers(this.major, other.major) ||
         compareIdentifiers(this.minor, other.minor) ||
         compareIdentifiers(this.patch, other.patch)
}

// Compare prerelease identifier lists element by element.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  if (this.prerelease.length && !other.prerelease.length) {
    return -1
  } else if (!this.prerelease.length && other.prerelease.length) {
    return 1
  } else if (!this.prerelease.length && !other.prerelease.length) {
    return 0
  }

  var i = 0
  do {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    } else if (b === undefined) {
      // longer prerelease list wins when all shared ids are equal
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i) // ++i is always truthy; the loop exits via the returns above
}
| |
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
// Mutates this instance in place (and refreshes `raw`); returns `this`.
// `identifier` names the prerelease tag for the pre* increments.
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        // Bump the right-most numeric identifier; i = -2 is a sentinel
        // marking that one was found and incremented.
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            i = -2
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  this.format()
  this.raw = this.version
  return this
}
| |
exports.inc = inc
// Functional wrapper around SemVer#inc; returns the bumped version string
// or null on invalid input. Supports the legacy signature where the
// identifier is passed in the `loose` position.
function inc (version, release, loose, identifier) {
  if (typeof (loose) === 'string') {
    identifier = loose
    loose = undefined
  }

  try {
    return new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    return null
  }
}

exports.diff = diff
// Name of the most significant part in which the two versions differ
// ('major'/'minor'/'patch', 'pre'-prefixed when either has a prerelease),
// or null when equal.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  } else {
    var v1 = parse(version1)
    var v2 = parse(version2)
    var prefix = ''
    if (v1.prerelease.length || v2.prerelease.length) {
      prefix = 'pre'
      // NOTE: `var` hoists this to function scope — when neither version
      // has a prerelease it stays undefined on purpose (see final return).
      var defaultResult = 'prerelease'
    }
    for (var key in v1) {
      if (key === 'major' || key === 'minor' || key === 'patch') {
        if (v1[key] !== v2[key]) {
          return prefix + key
        }
      }
    }
    return defaultResult // may be undefined
  }
}
| |
exports.compareIdentifiers = compareIdentifiers

var numeric = /^[0-9]+$/
// Spec-compliant identifier comparison: two numeric identifiers compare
// numerically, a numeric identifier is always lower than an alphanumeric
// one, and two alphanumeric identifiers compare lexically.
function compareIdentifiers (a, b) {
  var aIsNumeric = numeric.test(a)
  var bIsNumeric = numeric.test(b)

  if (aIsNumeric && bIsNumeric) {
    a = +a
    b = +b
  }

  if (a === b) return 0
  if (aIsNumeric && !bIsNumeric) return -1
  if (bIsNumeric && !aIsNumeric) return 1
  return a < b ? -1 : 1
}

exports.rcompareIdentifiers = rcompareIdentifiers
// Reverse-order variant of the above.
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}
| |
// Component accessors: parse the version, return one numeric part.
function major (a, loose) {
  return new SemVer(a, loose).major
}
exports.major = major

function minor (a, loose) {
  return new SemVer(a, loose).minor
}
exports.minor = minor

function patch (a, loose) {
  return new SemVer(a, loose).patch
}
exports.patch = patch

// Three-way comparison; the basis for every predicate below.
function compare (a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}
exports.compare = compare

function compareLoose (a, b) {
  return compare(a, b, true)
}
exports.compareLoose = compareLoose

function rcompare (a, b, loose) {
  return compare(b, a, loose)
}
exports.rcompare = rcompare

// NOTE: these sort the given list in place (Array#sort mutates).
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compare(a, b, loose)
  })
}
exports.sort = sort

function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.rcompare(a, b, loose)
  })
}
exports.rsort = rsort

// Boolean predicates expressed through compare().
function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}
exports.gt = gt

function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}
exports.lt = lt

function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}
exports.eq = eq

function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}
exports.neq = neq

function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}
exports.gte = gte

function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}
exports.lte = lte
| |
exports.cmp = cmp
// Apply the comparison operator `op` to versions `a` and `b`.
// '===' / '!==' compare the raw version strings for strict identity
// (no loose parsing); the remaining operators delegate to the semver
// predicates. Throws TypeError for an unknown operator.
function cmp (a, op, b, loose) {
  if (op === '===') {
    if (typeof a === 'object') a = a.version
    if (typeof b === 'object') b = b.version
    return a === b
  }

  if (op === '!==') {
    if (typeof a === 'object') a = a.version
    if (typeof b === 'object') b = b.version
    return a !== b
  }

  // '' and '=' are historical aliases for '=='.
  if (op === '' || op === '=' || op === '==') return eq(a, b, loose)
  if (op === '!=') return neq(a, b, loose)
  if (op === '>') return gt(a, b, loose)
  if (op === '>=') return gte(a, b, loose)
  if (op === '<') return lt(a, b, loose)
  if (op === '<=') return lte(a, b, loose)

  throw new TypeError('Invalid operator: ' + op)
}
| |
exports.Comparator = Comparator
// A single operator+version pair such as '>=1.2.3'. Callable with or
// without `new`; passing an existing Comparator with the same `loose`
// flag returns it unchanged.
function Comparator (comp, options) {
  if (!options || typeof options !== 'object') {
    // Legacy boolean `loose` argument.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    if (comp.loose === !!options.loose) {
      return comp
    } else {
      comp = comp.value
    }
  }

  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  this.parse(comp)

  if (this.semver === ANY) {
    this.value = ''
  } else {
    this.value = this.operator + this.semver.version
  }

  debug('comp', this)
}

// Sentinel "version" meaning "matches anything" (from '' or bare '>'-style
// comparators); compared by identity everywhere.
var ANY = {}
// Split the comparator string into `operator` and `semver`.
// Throws TypeError when the string does not match the comparator grammar.
Comparator.prototype.parse = function (comp) {
  var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var m = comp.match(r)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  this.operator = m[1]
  // '=' is the implicit default operator; normalize it away.
  if (this.operator === '=') {
    this.operator = ''
  }

  // if it literally is just '>' or '' then allow anything.
  if (!m[2]) {
    this.semver = ANY
  } else {
    this.semver = new SemVer(m[2], this.options.loose)
  }
}

Comparator.prototype.toString = function () {
  return this.value
}

// Does `version` satisfy this single comparator?
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  if (this.semver === ANY) {
    return true
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  return cmp(version, this.operator, this.semver, this.options)
}
| |
// True when this comparator and `comp` can be satisfied by at least one
// common version. Requires `comp` to already be a Comparator.
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  if (!options || typeof options !== 'object') {
    // Legacy boolean `loose` argument.
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An exact comparator ('' operator) intersects iff its pinned version
  // satisfies the other side, checked via a throwaway Range.
  if (this.operator === '') {
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  // Otherwise enumerate the geometric cases: same direction always
  // overlaps; opposite directions overlap when the bounds cross (or touch,
  // if both ends are inclusive).
  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
     (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
     (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
         (sameSemVer && differentDirectionsInclusive) ||
         oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
| |
exports.Range = Range
// Range: a set of comparator sets.  A version satisfies the range when
// it satisfies every comparator in at least one `||`-separated set.
function Range (range, options) {
  // Normalize a boolean/missing options argument.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  // Reuse an existing Range when its parse flags already match the
  // requested options; otherwise reparse its raw text.
  if (range instanceof Range) {
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      return range
    } else {
      return new Range(range.raw, options)
    }
  }

  // Promote a bare Comparator to a single-comparator Range.
  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  // Allow calling without `new`.
  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
    return this.parseRange(range.trim())
  }, this).filter(function (c) {
    // throw out any that are not relevant for whatever reason
    return c.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
| |
// Recompute and cache `this.range`, the canonical string form of the
// range: comparator sets joined by '||'.
Range.prototype.format = function () {
  var rendered = this.set
    .map(function (comps) {
      return comps.join(' ').trim()
    })
    .join('||')
    .trim()
  this.range = rendered
  return rendered
}
| |
// The canonical string form of the range, as last computed by format().
Range.prototype.toString = function () {
  return this.range
}
| |
// Parse one `||`-free subrange into an array of Comparator objects.
// The replace steps below are order-sensitive: sugar (hyphen ranges,
// spaced operators, `~`/`^` spacing) is normalized before comparators
// are split out.
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()
  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)
  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // At this point, the range is completely trimmed and
  // ready to be split into comparators.

  var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var set = range.split(' ').map(function (comp) {
    // Desugar carets/tildes/x-ranges/stars into primitive comparators.
    return parseComparator(comp, this.options)
  }, this).join(' ').split(/\s+/)
  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }
  set = set.map(function (comp) {
    return new Comparator(comp, this.options)
  }, this)

  return set
}
| |
// Two ranges intersect when some alternative of this range has every
// comparator intersecting every comparator of some alternative of the
// other range.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  // One comparator vs. one `&&`-set of the other range.
  function comparatorIntersectsSet (thisComparator, rangeComparators) {
    return rangeComparators.every(function (rangeComparator) {
      return thisComparator.intersects(rangeComparator, options)
    })
  }

  return this.set.some(function (thisComparators) {
    return thisComparators.every(function (thisComparator) {
      return range.set.some(function (rangeComparators) {
        return comparatorIntersectsSet(thisComparator, rangeComparators)
      })
    })
  })
}
| |
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
// Return the range as arrays of comparator strings, one array per
// '||' alternative.
function toComparators (range, options) {
  var parsed = new Range(range, options)
  return parsed.set.map(function (comparators) {
    var joined = comparators.map(function (c) {
      return c.value
    }).join(' ').trim()
    return joined.split(' ')
  })
}
| |
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Desugaring pipeline; each step runs on the output of the previous.
  var steps = [
    ['caret', replaceCarets],
    ['tildes', replaceTildes],
    ['xrange', replaceXRanges],
    ['stars', replaceStars]
  ]
  for (var i = 0; i < steps.length; i++) {
    comp = steps[i][1](comp, options)
    debug(steps[i][0], comp)
  }
  return comp
}
| |
// True when a version part is a wildcard: absent/empty, 'x'/'X', or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  return id === '*' || id.toLowerCase() === 'x'
}
| |
| // ~, ~> --> * (any, kinda silly) |
| // ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 |
| // ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 |
| // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 |
| // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 |
| // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 |
| function replaceTildes (comp, options) { |
| return comp.trim().split(/\s+/).map(function (comp) { |
| return replaceTilde(comp, options) |
| }).join(' ') |
| } |
| |
// Expand a single tilde comparator into its >=/< primitive pair.
function replaceTilde (comp, options) {
  var r = options.loose ? re[TILDELOOSE] : re[TILDE]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // `~` / `~*` allows anything.
      ret = ''
    } else if (isX(m)) {
      // `~2` => any 2.x.y
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    } else if (pr) {
      debug('replaceTilde pr', pr)
      // Keep the prerelease identifier on the lower bound.
      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
      ' <' + M + '.' + (+m + 1) + '.0'
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
      ' <' + M + '.' + (+m + 1) + '.0'
    }

    debug('tilde return', ret)
    return ret
  })
}
| |
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
  // Expand each whitespace-separated piece independently, then rejoin.
  var pieces = comp.trim().split(/\s+/)
  var replaced = pieces.map(function (piece) {
    return replaceCaret(piece, options)
  })
  return replaced.join(' ')
}
| |
// Expand a single caret comparator: allow changes that do not modify
// the left-most non-zero element of [major, minor, patch].
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var r = options.loose ? re[CARETLOOSE] : re[CARET]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // `^` / `^*` allows anything.
      ret = ''
    } else if (isX(m)) {
      // `^2` => any 2.x.y
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      if (M === '0') {
        // `^0.2` => only 0.2.x; minor is the left-most non-zero part.
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
      } else {
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      // Prerelease on the lower bound; upper bound picked as above.
      if (M === '0') {
        if (m === '0') {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
          ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
          ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
        ' <' + (+M + 1) + '.0.0'
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          // `^0.0.3` => only 0.0.3 exactly (patch is left-most non-zero).
          ret = '>=' + M + '.' + m + '.' + p +
          ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          ret = '>=' + M + '.' + m + '.' + p +
          ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        ret = '>=' + M + '.' + m + '.' + p +
        ' <' + (+M + 1) + '.0.0'
      }
    }

    debug('caret return', ret)
    return ret
  })
}
| |
// Expand every x-range piece of a comparator string.
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  var parts = comp.split(/\s+/)
  for (var i = 0; i < parts.length; i++) {
    parts[i] = replaceXRange(parts[i], options)
  }
  return parts.join(' ')
}
| |
// Desugar a single x-range comparator (e.g. `>=1.x`, `<2.*`, `1.2.x`)
// into primitive comparators.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    // Which of major/minor/patch are wildcards; a wildcard implies all
    // parts to its right are wildcards too.
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // `=x.y.z` with any wildcard behaves like the operator-less form.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p
    } else if (xm) {
      // Bare `1` / `1.x` => the whole major line.
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (xp) {
      // Bare `1.2` / `1.2.x` => the whole minor line.
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    }

    debug('xRange return', ret)

    return ret
  })
}
| |
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
// `options` is accepted for signature symmetry with the other
// replace* helpers but is not consulted here.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  return comp.trim().replace(re[STAR], '')
}
| |
// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: wildcard parts widen the bound down to .0 components.
  var lower
  if (isX(fM)) {
    lower = ''
  } else if (isX(fm)) {
    lower = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    lower = '>=' + fM + '.' + fm + '.0'
  } else {
    lower = '>=' + from
  }

  // Upper bound: wildcard parts turn the inclusive bound into an
  // exclusive one on the next major/minor line.
  var upper
  if (isX(tM)) {
    upper = ''
  } else if (isX(tm)) {
    upper = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    upper = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    upper = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    upper = '<=' + to
  }

  return (lower + ' ' + upper).trim()
}
| |
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
| |
// Check whether `version` satisfies every comparator in one `&&`-set.
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === ANY) {
        continue
      }

      if (set[i].semver.prerelease.length > 0) {
        // A prerelease version is acceptable only if some comparator
        // pins a prerelease on the exact same [major, minor, patch].
        var allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
| |
exports.satisfies = satisfies
// True when `version` is inside `range`; an unparseable range is
// treated as unsatisfiable rather than an error.
function satisfies (version, range, options) {
  var parsedRange
  try {
    parsedRange = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsedRange.test(version)
}
| |
exports.maxSatisfying = maxSatisfying
// Return the highest entry of `versions` that satisfies `range`,
// or null when none do (or the range is invalid).
function maxSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  var max = null
  var maxSV = null
  for (var i = 0; i < versions.length; i++) {
    var candidate = versions[i]
    if (!rangeObj.test(candidate)) {
      continue
    }
    // Track both the original entry (returned as-is) and its parsed
    // form (used for comparisons).
    if (!max || maxSV.compare(candidate) === -1) {
      max = candidate
      maxSV = new SemVer(max, options)
    }
  }
  return max
}
| |
exports.minSatisfying = minSatisfying
// Return the lowest entry of `versions` that satisfies `range`,
// or null when none do (or the range is invalid).
function minSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  var min = null
  var minSV = null
  for (var i = 0; i < versions.length; i++) {
    var candidate = versions[i]
    if (!rangeObj.test(candidate)) {
      continue
    }
    // Track both the original entry (returned as-is) and its parsed
    // form (used for comparisons).
    if (!min || minSV.compare(candidate) === 1) {
      min = candidate
      minSV = new SemVer(min, options)
    }
  }
  return min
}
| |
exports.minVersion = minVersion
// Compute the lowest version that could possibly satisfy `range`,
// or null if the range cannot be satisfied at all.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast paths: the absolute minimum release and prerelease versions.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  minver = null
  // Otherwise derive, per comparator, the smallest version that could
  // satisfy it, and keep the smallest candidate overall.
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // Smallest version strictly greater than X: next patch for a
          // release, next prerelease slot otherwise.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  }

  // The candidate must still satisfy the full range (upper bounds may
  // rule it out).
  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
| |
exports.validRange = validRange
// Return the normalized range string, or null when `range` is invalid.
function validRange (range, options) {
  try {
    var parsed = new Range(range, options)
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    return parsed.range || '*'
  } catch (er) {
    return null
  }
}
| |
// Determine if version is less than all the versions possible in the range
// Thin wrapper over outside() in the "lower" direction.
exports.ltr = ltr
function ltr (version, range, options) {
  return outside(version, range, '<', options)
}
| |
// Determine if version is greater than all the versions possible in the range.
// Thin wrapper over outside() in the "higher" direction.
exports.gtr = gtr
function gtr (version, range, options) {
  return outside(version, range, '>', options)
}
| |
exports.outside = outside
// Return true when `version` lies entirely outside `range` on the side
// given by `hilo`: '>' means above every allowed version, '<' below.
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // Pick comparison helpers so one walk handles both directions.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    var high = null
    var low = null

    // Find the extreme comparators of this '||' alternative; ANY is
    // treated as >=0.0.0 for the purpose of the bounds.
    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
| |
exports.prerelease = prerelease
// Return the prerelease component array of `version`, or null when the
// version is invalid or has no prerelease part.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (parsed && parsed.prerelease.length) {
    return parsed.prerelease
  }
  return null
}
| |
exports.intersects = intersects
// True when the two ranges share at least one version.
function intersects (r1, r2, options) {
  r1 = new Range(r1, options)
  r2 = new Range(r2, options)
  // Forward options so loose/includePrerelease semantics are also
  // respected during the comparator-level checks (previously dropped).
  return r1.intersects(r2, options)
}
| |
exports.coerce = coerce
// Coerce a string containing something version-like into a SemVer,
// filling in missing minor/patch with 0.  Returns null when nothing
// version-like is found or the input is not a string.
function coerce (version) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  var match = version.match(re[COERCE])
  if (!match) {
    return null
  }

  var major = match[1]
  var minor = match[2] || '0'
  var patch = match[3] || '0'
  return parse(major + '.' + minor + '.' + patch)
}
| |
| |
| /***/ }), |
| |
| /***/ 49: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const os = __webpack_require__(87); |
| const execa = __webpack_require__(955); |
| |
// Reference: https://www.gaijin.at/en/lstwinver.php
const names = new Map([
	['10.0', '10'],
	['6.3', '8.1'],
	['6.2', '8'],
	['6.1', '7'],
	['6.0', 'Vista'],
	['5.2', 'Server 2003'],
	['5.1', 'XP'],
	['5.0', '2000'],
	['4.9', 'ME'],
	['4.1', '98'],
	['4.0', '95']
]);

// Versions whose `n.n` prefix is shared between desktop and server
// editions and therefore need a runtime lookup to disambiguate.
const ambiguousVersions = new Set(['6.1', '6.2', '6.3', '10.0']);

const windowsRelease = release => {
	const version = /\d+\.\d/.exec(release || os.release());

	if (release && !version) {
		throw new Error('`release` argument doesn\'t match `n.n`');
	}

	const ver = (version || [])[0];

	// Server 2008, 2012, 2016, and 2019 are ambiguous with desktop versions,
	// so when inspecting the machine we run on, query the OS caption via
	// `wmic` (https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx),
	// falling back to PowerShell where `wmic` is obsolete (later versions of
	// Windows 10).  A server year in the caption marks a server edition.
	if ((!release || release === os.release()) && ambiguousVersions.has(ver)) {
		let stdout;
		try {
			stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
		} catch (_) {
			stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
		}

		const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];

		if (year) {
			return `Server ${year}`;
		}
	}

	return names.get(ver);
};
| |
| module.exports = windowsRelease; |
| |
| |
| /***/ }), |
| |
| /***/ 66: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.getChecked = exports.getName = exports.extractLabels = exports.formatLabel = void 0; |
| /** |
| * Format a label into a string representation |
| * @param label labels |
| * @returns string representation of a given label |
| */ |
| function formatLabel(label) { |
| return `{ name: '${label.name}', checked: ${label.checked} }`; |
| } |
| exports.formatLabel = formatLabel; |
| /** |
| * Extract labels from the description of an issue or a pull request |
| * @param description string that contains labels |
| * @param labelPattern regular expression to use to find labels |
| * @returns labels (list of { name: string; checked: boolean; }) |
| * |
| * @example |
| * > const body = '- [ ] `a`\n- [x] `b`' |
| * > const labelPattern = '- \\[([ xX]*)\\] ?`(.+?)`' |
| * > extractLabels(body, labelPattern) |
| * [ { name: 'a', checked: false }, { name: 'b', checked: true } ] |
| */ |
| function extractLabels(description, labelPattern) { |
| function helper(regex, labels = []) { |
| const res = regex.exec(description); |
| if (res) { |
| const checked = res[1].trim().toLocaleLowerCase() === 'x'; |
| const name = res[2].trim(); |
| return helper(regex, [...labels, { name, checked }]); |
| } |
| return labels; |
| } |
| return helper(new RegExp(labelPattern, 'g')); |
| } |
| exports.extractLabels = extractLabels; |
| /** |
| * Get `name` property from an object |
| * @param obj object that has `name` property |
| * @returns value of `name` property |
| * |
| * @example |
| * > getName({ name: 'a' }) |
| * 'a' |
| */ |
| function getName({ name }) { |
| return name; |
| } |
| exports.getName = getName; |
| /** |
| * Get `checked` property from an object |
| * @param obj object that has `checked` property |
| * @returns value of `checked` property |
| * |
| * @example |
| * > getChecked({ checked: true }) |
| * true |
| */ |
| function getChecked({ checked }) { |
| return checked; |
| } |
| exports.getChecked = getChecked; |
| |
| |
| /***/ }), |
| |
| /***/ 87: |
| /***/ (function(module) { |
| |
| module.exports = require("os"); |
| |
| /***/ }), |
| |
| /***/ 118: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const os = __webpack_require__(87); |
| |
// Darwin kernel major version -> macOS marketing name.
const nameMap = new Map([
	[19, 'Catalina'],
	[18, 'Mojave'],
	[17, 'High Sierra'],
	[16, 'Sierra'],
	[15, 'El Capitan'],
	[14, 'Yosemite'],
	[13, 'Mavericks'],
	[12, 'Mountain Lion'],
	[11, 'Lion'],
	[10, 'Snow Leopard'],
	[9, 'Leopard'],
	[8, 'Tiger'],
	[7, 'Panther'],
	[6, 'Jaguar'],
	[5, 'Puma']
]);

// Map a Darwin release string (defaulting to the running kernel) to the
// macOS name and 10.x version number.
const macosRelease = release => {
	const darwinMajor = Number((release || os.release()).split('.')[0]);

	return {
		name: nameMap.get(darwinMajor),
		version: '10.' + (darwinMajor - 4)
	};
};
| |
| module.exports = macosRelease; |
| // TODO: remove this in the next major version |
| module.exports.default = macosRelease; |
| |
| |
| /***/ }), |
| |
| /***/ 127: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript-emitted helper: copy an exported binding `k` from module
// `m` onto namespace object `o` (as a live getter where supported).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted helper: attach a CommonJS module as the `default`
// export of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript-emitted helper: emulate `import * as ns` for a CommonJS
// module by copying own properties and setting `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; |
| const httpClient = __importStar(__webpack_require__(539)); |
/**
 * Resolve the authorization string from either a token or `options.auth`.
 * Exactly one of the two must be provided; throws otherwise.
 */
function getAuthString(token, options) {
    const hasToken = Boolean(token);
    const hasAuth = Boolean(options.auth);
    if (!hasToken && !hasAuth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    if (hasToken && hasAuth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
| exports.getAuthString = getAuthString; |
// Build an http(s) agent for `destinationUrl` via the http-client's
// getAgent(), which encapsulates any configured proxy handling.
function getProxyAgent(destinationUrl) {
    const hc = new httpClient.HttpClient();
    return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
/**
 * GitHub API base URL: the GITHUB_API_URL environment variable when set
 * (and non-empty), otherwise the public api.github.com endpoint.
 */
function getApiBaseUrl() {
    const fromEnv = process.env.GITHUB_API_URL;
    return fromEnv ? fromEnv : 'https://api.github.com';
}
| exports.getApiBaseUrl = getApiBaseUrl; |
| //# sourceMappingURL=utils.js.map |
| |
| /***/ }), |
| |
| /***/ 129: |
| /***/ (function(module) { |
| |
| module.exports = require("child_process"); |
| |
| /***/ }), |
| |
| /***/ 141: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| var net = __webpack_require__(631); |
| var tls = __webpack_require__(16); |
| var http = __webpack_require__(605); |
| var https = __webpack_require__(211); |
| var events = __webpack_require__(614); |
| var assert = __webpack_require__(357); |
| var util = __webpack_require__(669); |
| |
| |
| exports.httpOverHttp = httpOverHttp; |
| exports.httpsOverHttp = httpsOverHttp; |
| exports.httpOverHttps = httpOverHttps; |
| exports.httpsOverHttps = httpsOverHttps; |
| |
| |
// Agent factory: plain HTTP requests tunneled through an HTTP proxy.
function httpOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  return tunnelAgent;
}
| |
// Agent factory: HTTPS requests tunneled through an HTTP proxy; the
// tunnel socket is upgraded to TLS toward the destination.
function httpsOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
| |
// Agent factory: plain HTTP requests tunneled through an HTTPS proxy.
function httpOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  return tunnelAgent;
}
| |
// Agent factory: HTTPS requests tunneled through an HTTPS proxy; the
// tunnel socket is upgraded to TLS toward the destination.
function httpsOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
| |
| |
// An http.Agent work-alike that tunnels requests through a proxy using
// CONNECT.  `options.proxy` holds the proxy connection settings.
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  // Requests queued while all socket slots are busy.
  self.requests = [];
  // Sockets (or pending placeholders) currently owned by the agent.
  self.sockets = [];

  // When a socket is freed, hand it to a queued request for the same
  // host:port; otherwise close it and release its pool slot.
  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
| |
// Queue or dispatch a request: when the socket pool is full the request
// waits in `requests`; otherwise a new tunnel socket is created for it.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    // Notify the agent that this socket can serve another request.
    function onFree() {
      self.emit('free', socket, options);
    }

    // Drop the socket from the pool and detach listeners once it is
    // closed or explicitly removed from the agent.
    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
| |
// Establish a CONNECT tunnel through the proxy for `options.host:port`,
// then hand the raw socket to `cb`.  A placeholder object occupies the
// socket pool slot until the tunnel is up.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from instead of the deprecated (and unsafe for numeric
    // input) `new Buffer(...)` constructor.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  // The proxy answered the CONNECT request.  Only a 200 with an empty
  // body makes the socket a usable tunnel; everything else is surfaced
  // to the pending request as an ECONNRESET error.
  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the live socket into the placeholder's pool slot.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
| |
// Release a socket's pool slot; if requests are queued, immediately
// create a replacement socket for the oldest one.
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index === -1) {
    return;
  }
  this.sockets.splice(index, 1);

  var pending = this.requests.shift();
  if (!pending) {
    return;
  }
  // A slot opened up: spin up a new tunnel so the pool stays utilized.
  this.createSocket(pending, function(socket) {
    pending.request.onSocket(socket);
  });
};
| |
// createSocket variant used by the https-over-* agents: wraps the raw
// tunnel socket in TLS toward the destination, deriving the SNI server
// name from the request's host header when available.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // Replace the raw socket's pool slot with the TLS socket.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
| |
| |
// Normalize agent call arguments to an options object: positional
// (host, port, localAddress) since v0.10, or a ready-made object for
// v0.11 or later.
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // already an options object (v0.11+)
  }
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
| |
// Shallow-merge every object argument into `target`, later arguments
// winning; `undefined` values and non-object arguments are skipped.
function mergeOptions(target) {
  for (var i = 1; i < arguments.length; ++i) {
    var overrides = arguments[i];
    if (typeof overrides !== 'object') {
      continue;
    }
    var keys = Object.keys(overrides);
    for (var j = 0; j < keys.length; ++j) {
      var key = keys[j];
      if (overrides[key] !== undefined) {
        target[key] = overrides[key];
      }
    }
  }
  return target;
}
| |
| |
// Debug logger: active only when NODE_DEBUG mentions "tunnel";
// otherwise a no-op, so call sites can log unconditionally.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Prefix messages with "TUNNEL:" for grep-ability.
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
| |
| |
| /***/ }), |
| |
| /***/ 145: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const pump = __webpack_require__(453); |
| const bufferStream = __webpack_require__(966); |
| |
/** Error raised by getStream when buffered data exceeds `maxBuffer`. */
class MaxBufferError extends Error {
	constructor() {
		super('maxBuffer exceeded');
		this.name = 'MaxBufferError';
	}
}
| |
/**
 * Buffers `inputStream` fully and resolves with its accumulated value.
 * Rejects with MaxBufferError once more than `options.maxBuffer` is
 * buffered; any rejection error carries the partially buffered data on
 * `error.bufferedData`.
 */
function getStream(inputStream, options) {
	if (!inputStream) {
		return Promise.reject(new Error('Expected a stream'));
	}

	options = Object.assign({maxBuffer: Infinity}, options);

	const {maxBuffer} = options;

	let stream;
	return new Promise((resolve, reject) => {
		const rejectPromise = error => {
			if (error) { // A null check
				// Expose whatever was buffered before the failure.
				error.bufferedData = stream.getBufferedValue();
			}
			reject(error);
		};

		// pump pipes input into the buffering stream and reports completion
		// or the first error via its callback.
		stream = pump(inputStream, bufferStream(options), error => {
			if (error) {
				rejectPromise(error);
				return;
			}

			resolve();
		});

		// Enforce maxBuffer as data arrives rather than at the end.
		stream.on('data', () => {
			if (stream.getBufferedLength() > maxBuffer) {
				rejectPromise(new MaxBufferError());
			}
		});
	}).then(() => stream.getBufferedValue());
}
| |
// Public API: getStream resolves with a string by default; `.buffer` forces
// a Buffer result and `.array` collects the stream into an array of chunks.
module.exports = getStream;
module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
module.exports.MaxBufferError = MaxBufferError;
| |
| |
| /***/ }), |
| |
| /***/ 168: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| const alias = ['stdin', 'stdout', 'stderr']; |
| |
| const hasAlias = opts => alias.some(x => Boolean(opts[x])); |
| |
| module.exports = opts => { |
| if (!opts) { |
| return null; |
| } |
| |
| if (opts.stdio && hasAlias(opts)) { |
| throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); |
| } |
| |
| if (typeof opts.stdio === 'string') { |
| return opts.stdio; |
| } |
| |
| const stdio = opts.stdio || []; |
| |
| if (!Array.isArray(stdio)) { |
| throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); |
| } |
| |
| const result = []; |
| const len = Math.max(stdio.length, alias.length); |
| |
| for (let i = 0; i < len; i++) { |
| let value = null; |
| |
| if (stdio[i] !== undefined) { |
| value = stdio[i]; |
| } else if (opts[alias[i]] !== undefined) { |
| value = opts[alias[i]]; |
| } |
| |
| result[i] = value; |
| } |
| |
| return result; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 197: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
// Core of the `isexe` package: determine whether a path names an executable
// regular file for the current (or an impersonated) uid/gid.
module.exports = isexe
isexe.sync = sync

var fs = __webpack_require__(747)

// Async check: cb(error, isExecutable). A stat error yields `false` rather
// than being swallowed.
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    cb(er, er ? false : checkStat(stat, options))
  })
}

// Synchronous variant; throws if statSync fails.
function sync (path, options) {
  return checkStat(fs.statSync(path), options)
}

// A path is executable iff it is a regular file with a matching exec bit.
function checkStat (stat, options) {
  return stat.isFile() && checkMode(stat, options)
}
| |
// Decides executability from the stat mode bits: world-execute always wins;
// group-execute requires a matching gid; owner-execute a matching uid; and
// root (uid 0) may execute anything with an owner or group execute bit set.
function checkMode (stat, options) {
  var mode = stat.mode
  var fileUid = stat.uid
  var fileGid = stat.gid

  // Callers may impersonate a specific uid/gid via options; note
  // process.getuid/getgid are absent on Windows, leaving these undefined.
  var myUid = options.uid !== undefined ?
    options.uid : process.getuid && process.getuid()
  var myGid = options.gid !== undefined ?
    options.gid : process.getgid && process.getgid()

  var ownerExec = parseInt('100', 8)
  var groupExec = parseInt('010', 8)
  var otherExec = parseInt('001', 8)
  var ownerOrGroup = ownerExec | groupExec

  return (mode & otherExec) ||
    (mode & groupExec) && fileGid === myGid ||
    (mode & ownerExec) && fileUid === myUid ||
    (mode & ownerOrGroup) && myUid === 0
}
| |
| |
| /***/ }), |
| |
| /***/ 198: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// --- TypeScript compiler-emitted helper shims (tslib equivalents) ---
// Re-exports property `k` of module `m` on `o` under (possibly renamed) `k2`.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches a CommonJS module as the `default` export of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns` interop for CommonJS requires.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Drives a generator as an async function (down-leveled async/await):
// each yielded value is adopted as a promise and fed back into the generator.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Adapts an (async or sync) iterable into an async iterator (for-await-of).
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const core = __importStar(__webpack_require__(470)); |
| const github = __importStar(__webpack_require__(469)); |
| const enums_1 = __webpack_require__(346); |
| const utils_1 = __webpack_require__(611); |
| const labels_1 = __webpack_require__(66); |
| const logger_1 = __webpack_require__(504); |
/**
 * Applies the doc-label workflow to one issue or pull request: extracts
 * checkbox labels from the description, syncs them with labels already on
 * the issue, and adds/removes `doc-info-missing` (with a comment to the
 * author) depending on whether any doc checkbox was ticked.
 *
 * @param octokit       authenticated Octokit client
 * @param repo          repository name
 * @param owner         repository owner
 * @param issue_number  issue / PR number
 * @param htmlUrl       issue URL (debug logging only)
 * @param description   issue / PR body the checkbox labels come from
 * @param labelPattern  regex pattern used by extractLabels
 * @param logger        Logger instance
 * @param labelsin      labels currently on the issue (optional)
 * @param user1         login of the issue author (optional)
 */
function processIssue(octokit, repo, owner, issue_number, htmlUrl, description, labelPattern, logger, labelsin, user1) {
    return __awaiter(this, void 0, void 0, function* () {
        logger.debug(`--- ${htmlUrl} ---`);
        // bugfix: the schedule path used to call us without labels/author,
        // which crashed on `labelsin.includes` below; normalize first.
        labelsin = labelsin || [];
        // Labels extracted from an issue description
        logger.debug(labelsin);
        if (labelsin.includes('doc-added')) {
            return; // already fully processed
        }
        const Labels = ['doc', 'doc-required', 'no-need-doc', 'doc-info-missing'];
        const labels = labels_1.extractLabels(description, labelPattern);
        const succmessage = "@" + user1 + ":Thanks for providing doc info!";
        if (user1) {
            try {
                // bugfix: await the call and pass `assignees` as an array, as
                // the GitHub API expects; kept best-effort on failure.
                yield octokit.issues.addAssignees({
                    owner,
                    repo,
                    issue_number,
                    assignees: [user1],
                });
            }
            catch {
                logger.debug('could not assign issue author');
            }
        }
        // (bugfix: removed an unawaited octokit.issues.listEvents call whose
        // result was never used)
        const listEventsData = yield octokit.paginate(octokit.issues.listEvents, {
            owner,
            repo,
            issue_number,
        });
        // Labels added or removed by users
        const labelsToIgnore = utils_1.removeDuplicates(listEventsData
            .filter(event => utils_1.isLabelEvent(event) && utils_1.isCreatedByUser(event))
            .map(({ label }) => label && label.name));
        logger.debug('Labels to ignore:');
        logger.debug(utils_1.formatStrArray(labelsToIgnore));
        // Labels registered in a repository
        const labelsForRepoData = yield octokit.paginate(octokit.issues.listLabelsForRepo, {
            owner,
            repo,
        });
        logger.debug("-------------------------------");
        const labelsForRepo = labelsForRepoData.map(labels_1.getName);
        // Only act on labels that exist in the repo and were not manually
        // touched by a user.
        const labelsToProcess = labels.filter(({ name }) => labelsForRepo.includes(name) && !labelsToIgnore.includes(name));
        // Labels that are already applied on an issue
        const labelsOnIssueResp = yield octokit.issues.listLabelsOnIssue({
            owner,
            repo,
            issue_number,
        });
        const labelsOnIssue = labelsOnIssueResp.data.map(labels_1.getName);
        logger.debug(labelsOnIssue);
        logger.debug('Labels to process:');
        logger.debug(utils_1.formatStrArray(labelsToProcess.map(labels_1.formatLabel)));
        // Remove labels whose checkbox is unchecked but which are on the issue.
        const shouldRemove = ({ name, checked }) => !checked && labelsOnIssue.includes(name);
        const labelsToRemove = labelsToProcess.filter(shouldRemove).map(labels_1.getName);
        logger.debug('Labels to remove:');
        logger.debug(utils_1.formatStrArray(labelsToRemove));
        // bugfix: sequential awaited loop; the previous forEach(async) fired
        // the removals without awaiting them, racing the steps below.
        for (const name of labelsToRemove) {
            yield octokit.issues.removeLabel({
                owner,
                repo,
                issue_number,
                name,
            });
        }
        // Add labels whose checkbox is checked and are not yet on the issue.
        const shouldAdd = ({ name, checked }) => checked && !labelsOnIssue.includes(name);
        const labelsToAdd = labelsToProcess.filter(shouldAdd).map(labels_1.getName);
        logger.debug(labelsToAdd);
        // Projected final label set for the issue.
        let issuelabels = utils_1.removeDuplicates(labelsToAdd.concat(labelsin).concat(labelsOnIssue));
        let corrent = 0;
        if (labelsToRemove.length != 0) {
            // bugfix: the original used `.some(item => x != item)`, which keeps
            // a label whenever it differs from ANY one removed label — always
            // true once two or more labels are being removed.
            issuelabels = issuelabels.filter((x) => !labelsToRemove.includes(x));
        }
        console.log("-----------------------");
        console.log(issuelabels);
        console.log("-----------------------");
        // Count how many of the four doc labels are missing, and whether
        // doc-info-missing is present / about to be added.
        let num = 0;
        let isdocmis = 0;
        for (let index = 0; index < Labels.length; index++) {
            if (issuelabels.includes(Labels[index])) {
                console.log(Labels[index], "issue exists");
                if (Labels[index] == 'doc-info-missing') {
                    isdocmis = 1;
                }
            }
            else {
                num = num + 1;
            }
            if (labelsToAdd.includes(Labels[index])) {
                corrent = 1;
            }
        }
        const errmessage = "@" + user1 + ":Thanks for your contribution. For this PR, do we need to update docs?\n(The [PR template contains info about doc](https://github.com/apache/pulsar/blob/master/.github/PULL_REQUEST_TEMPLATE.md#documentation), which helps others know more about the changes. Can you provide doc-related info in this and future PR descriptions? Thanks)";
        // None of the doc labels present: ask the author for doc info.
        if (num == 4 && isdocmis == 0) {
            labelsToAdd.push("doc-info-missing");
            yield octokit.issues.createComment({
                owner,
                repo,
                issue_number,
                body: errmessage
            });
        }
        // Doc info has arrived: clear doc-info-missing and thank the author.
        if (num < 3 && isdocmis == 1) {
            yield octokit.issues.removeLabel({
                owner,
                repo,
                issue_number,
                name: "doc-info-missing"
            });
            yield octokit.issues.createComment({
                owner,
                repo,
                issue_number,
                body: succmessage
            });
        }
        logger.debug('Labels to add:');
        logger.debug(utils_1.formatStrArray(labelsToAdd));
        if (labelsToAdd.length > 0) {
            yield octokit.issues.addLabels({
                owner,
                repo,
                issue_number,
                labels: labelsToAdd,
            });
        }
        // A doc checkbox was ticked this round: drop doc-info-missing if it is
        // still present (removeLabel throws when the label is absent).
        if (corrent == 1) {
            try {
                yield octokit.issues.removeLabel({
                    owner,
                    repo,
                    issue_number,
                    name: "doc-info-missing"
                });
                yield octokit.issues.createComment({
                    owner,
                    repo,
                    issue_number,
                    body: succmessage
                });
            }
            catch {
                logger.debug('no doc info missing');
            }
        }
    });
}
/**
 * Entry point: reads the action inputs and routes the GitHub event
 * (issues / pull_request / pull_request_target / schedule) to processIssue
 * with the appropriate payload fields. Failures mark the action as failed.
 */
function main() {
    var e_1, _a;
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const token = core.getInput('github-token', { required: true });
            const labelPattern = core.getInput('label-pattern', { required: true });
            const quiet = core.getInput('quiet', { required: false });
            const offset = core.getInput('offset', { required: false });
            utils_1.validateEnum('quiet', quiet, enums_1.Quiet);
            const logger = new logger_1.Logger(quiet === enums_1.Quiet.TRUE ? logger_1.LoggingLevel.SILENT : logger_1.LoggingLevel.DEBUG);
            const octokit = github.getOctokit(token);
            const { repo, owner } = github.context.repo;
            const { eventName } = github.context;
            switch (eventName) {
                case 'issues': {
                    const { issue } = github.context.payload;
                    if (issue === undefined) {
                        return;
                    }
                    const labelsin = issue.labels.map((label) => label.name);
                    const user1 = issue.user.login;
                    const { body, html_url, number: issue_number } = issue;
                    if (body === undefined || html_url === undefined) {
                        return;
                    }
                    yield processIssue(octokit, repo, owner, issue_number, html_url, body, labelPattern, logger, labelsin, user1);
                    break;
                }
                case 'pull_request':
                case 'pull_request_target': {
                    const { pull_request } = github.context.payload;
                    // bugfix: the undefined guard must run before `.merged` is
                    // read; the original order crashed on missing payloads.
                    if (pull_request === undefined) {
                        return;
                    }
                    if (pull_request.merged === true) {
                        return; // nothing to do for already-merged PRs
                    }
                    const labelsin = pull_request.labels.map((label) => label.name);
                    const user1 = pull_request.user.login;
                    const { body, html_url, number: issue_number } = pull_request;
                    if (body === undefined || html_url === undefined) {
                        return;
                    }
                    yield processIssue(octokit, repo, owner, issue_number, html_url, body, labelPattern, logger, labelsin, user1);
                    break;
                }
                case 'schedule': {
                    const parsed = utils_1.parseOffsetString(offset);
                    const offsetDate = utils_1.getOffsetDate(new Date(), ...parsed);
                    try {
                        // Iterate through all open issues and pull requests
                        for (var _b = __asyncValues(octokit.paginate.iterator(octokit.issues.listForRepo, { owner, repo, since: offsetDate.toISOString() })), _c; _c = yield _b.next(), !_c.done;) {
                            const page = _c.value;
                            for (const issue of page.data) {
                                const { body, number, html_url } = issue;
                                // bugfix: forward the issue's labels and author;
                                // processIssue previously crashed on the missing
                                // trailing arguments in this path.
                                const labelsin = (issue.labels || []).map((label) => label.name);
                                const user1 = issue.user && issue.user.login;
                                yield processIssue(octokit, repo, owner, number, html_url, body, labelPattern, logger, labelsin, user1);
                            }
                            const rateLimitResp = yield octokit.rateLimit.get();
                            logger.debug(rateLimitResp.data);
                        }
                    }
                    catch (e_1_1) { e_1 = { error: e_1_1 }; }
                    finally {
                        try {
                            if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
                        }
                        finally { if (e_1) throw e_1.error; }
                    }
                    break;
                }
                default: {
                    return;
                }
            }
        }
        catch (error) {
            core.setFailed(error.message);
        }
    });
}
// Kick off the action; rethrow any rejection so the process reports failure
// (main() itself already converts expected errors into core.setFailed).
main().catch(err => {
    throw err;
});
| |
| |
| /***/ }), |
| |
| /***/ 211: |
| /***/ (function(module) { |
| |
// Webpack external: re-export Node's built-in `https` module as id 211.
module.exports = require("https");
| |
| /***/ }), |
| |
| /***/ 260: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| // Note: since nyc uses this module to output coverage, any lines |
| // that are in the direct sync flow of nyc's outputCoverage are |
| // ignored, since we can never get coverage for them. |
var assert = __webpack_require__(357)
var signals = __webpack_require__(654)
var isWin = /^win/i.test(process.platform)

var EE = __webpack_require__(614)
/* istanbul ignore if */
if (typeof EE !== 'function') {
  EE = EE.EventEmitter
}

// Share a single emitter across every copy of signal-exit loaded in the
// process, stashed on the global `process` object.
var emitter
if (process.__signal_exit_emitter__) {
  emitter = process.__signal_exit_emitter__
} else {
  emitter = process.__signal_exit_emitter__ = new EE()
  emitter.count = 0
  emitter.emitted = {}
}

// Because this emitter is a global, we have to check to see if a
// previous version of this library failed to enable infinite listeners.
// I know what you're about to say. But literally everything about
// signal-exit is a compromise with evil. Get used to it.
if (!emitter.infinite) {
  emitter.setMaxListeners(Infinity)
  emitter.infinite = true
}

// Registers `cb` to run when the process exits ('afterexit' when
// opts.alwaysLast). Returns a remover that also tears down the signal hooks
// once no listeners remain.
module.exports = function (cb, opts) {
  assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')

  if (loaded === false) {
    load()
  }

  var ev = 'exit'
  if (opts && opts.alwaysLast) {
    ev = 'afterexit'
  }

  var remove = function () {
    emitter.removeListener(ev, cb)
    if (emitter.listeners('exit').length === 0 &&
        emitter.listeners('afterexit').length === 0) {
      unload()
    }
  }
  emitter.on(ev, cb)

  return remove
}
| |
// Detaches all signal listeners and restores the patched process methods.
module.exports.unload = unload
function unload () {
  if (!loaded) {
    return
  }
  loaded = false

  signals.forEach(function (sig) {
    try {
      process.removeListener(sig, sigListeners[sig])
    } catch (er) {}
  })
  process.emit = originalProcessEmit
  process.reallyExit = originalProcessReallyExit
  emitter.count -= 1
}

// Emits `event` at most once per process (guarded by emitter.emitted).
function emit (event, code, signal) {
  if (emitter.emitted[event]) {
    return
  }
  emitter.emitted[event] = true
  emitter.emit(event, code, signal)
}

// { <signal>: <listener fn>, ... }
var sigListeners = {}
signals.forEach(function (sig) {
  sigListeners[sig] = function listener () {
    // If there are no other listeners, an exit is coming!
    // Simplest way: remove us and then re-send the signal.
    // We know that this will kill the process, so we can
    // safely emit now.
    var listeners = process.listeners(sig)
    if (listeners.length === emitter.count) {
      unload()
      emit('exit', null, sig)
      /* istanbul ignore next */
      emit('afterexit', null, sig)
      /* istanbul ignore next */
      if (isWin && sig === 'SIGHUP') {
        // "SIGHUP" throws an `ENOSYS` error on Windows,
        // so use a supported signal instead
        sig = 'SIGINT'
      }
      process.kill(process.pid, sig)
    }
  }
})
| |
module.exports.signals = function () {
  return signals
}

module.exports.load = load

var loaded = false

// Installs the signal listeners and patches process.emit/reallyExit so exit
// events can be observed exactly once, however the process terminates.
function load () {
  if (loaded) {
    return
  }
  loaded = true

  // This is the number of onSignalExit's that are in play.
  // It's important so that we can count the correct number of
  // listeners on signals, and don't wait for the other one to
  // handle it instead of us.
  emitter.count += 1

  // Not every signal can be listened for on every platform; keep only
  // those that attach successfully.
  signals = signals.filter(function (sig) {
    try {
      process.on(sig, sigListeners[sig])
      return true
    } catch (er) {
      return false
    }
  })

  process.emit = processEmit
  process.reallyExit = processReallyExit
}

var originalProcessReallyExit = process.reallyExit
// Patched reallyExit: fire the exit events before actually exiting.
function processReallyExit (code) {
  process.exitCode = code || 0
  emit('exit', process.exitCode, null)
  /* istanbul ignore next */
  emit('afterexit', process.exitCode, null)
  /* istanbul ignore next */
  originalProcessReallyExit.call(process, process.exitCode)
}

var originalProcessEmit = process.emit
// Patched process.emit: intercept 'exit' to record the code and fan out the
// library's exit/afterexit events; every other event passes through.
function processEmit (ev, arg) {
  if (ev === 'exit') {
    if (arg !== undefined) {
      process.exitCode = arg
    }
    var ret = originalProcessEmit.apply(this, arguments)
    emit('exit', process.exitCode, null)
    /* istanbul ignore next */
    emit('afterexit', process.exitCode, null)
    return ret
  } else {
    return originalProcessEmit.apply(this, arguments)
  }
}
| |
| |
| /***/ }), |
| |
| /***/ 262: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.Context = void 0; |
| const fs_1 = __webpack_require__(747); |
| const os_1 = __webpack_require__(87); |
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        // Webhook payload, parsed from the event file GitHub Actions points
        // to via GITHUB_EVENT_PATH; stays an empty object when unavailable.
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                // Warn (on stdout) rather than throw when the event file is gone.
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
    }
    /**
     * Issue coordinates: repo owner/name plus the number taken from the
     * issue, pull_request, or top-level payload (in that order).
     */
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    /**
     * Repository coordinates from GITHUB_REPOSITORY ("owner/repo"), falling
     * back to the payload's repository object; throws when neither is set.
     */
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports.Context = Context;
| //# sourceMappingURL=context.js.map |
| |
| /***/ }), |
| |
| /***/ 280: |
| /***/ (function(module) { |
| |
// before-after-hook: expose `register` as this module's entry point.
module.exports = register
| |
// before-after-hook `register`: runs `method(options)` wrapped by every hook
// registered under `name`, returning a Promise of the final result.
function register (state, name, method, options) {
  if (typeof method !== 'function') {
    throw new Error('method for before hook must be a function')
  }

  options = options || {}

  // An array of names is applied right-to-left, each name wrapping the
  // composition built so far. NOTE: `.reverse()` mutates the caller's
  // array, matching the original upstream behavior.
  if (Array.isArray(name)) {
    var chained = name.reverse().reduce(function (callback, singleName) {
      return register.bind(null, state, singleName, callback, options)
    }, method)
    return chained()
  }

  return Promise.resolve().then(function () {
    var hooks = state.registry[name]

    // No hooks registered under this name: just invoke the method.
    if (!hooks) {
      return method(options)
    }

    // Fold the registered hooks around the method, innermost first.
    var composed = hooks.reduce(function (inner, registered) {
      return registered.hook.bind(null, inner, options)
    }, method)
    return composed()
  })
}
| |
| |
| /***/ }), |
| |
| /***/ 299: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
Object.defineProperty(exports, '__esModule', { value: true });

// Version of @octokit/plugin-paginate-rest this bundle was built from.
const VERSION = "2.2.3";
| |
| /** |
| * Some “list” response that can be paginated have a different response structure |
| * |
| * They have a `total_count` key in the response (search also has `incomplete_results`, |
| * /installation/repositories also has `repository_selection`), as well as a key with |
| * the list of the items which name varies from endpoint to endpoint. |
| * |
| * Octokit normalizes these responses so that paginated results are always returned following |
| * the same structure. One challenge is that if the list response has only one page, no Link |
| * header is provided, so this header alone is not sufficient to check wether a response is |
| * paginated or not. |
| * |
| * We check if a "total_count" key is present in the response data, but also make sure that |
| * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would |
| * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref |
| */ |
/**
 * Normalizes "list" responses wrapped as `{ total_count, <items> }` so that
 * `response.data` is always the plain item array; the metadata keys
 * (`total_count`, `incomplete_results`, `repository_selection`) are
 * re-attached onto that array. Non-envelope responses pass through as-is.
 */
function normalizePaginatedListResponse(response) {
  // A "url" key means this is a plain object response (e.g. the combined
  // status endpoint), not a paginated list envelope.
  if (!("total_count" in response.data) || "url" in response.data) {
    return response;
  }

  const {
    incomplete_results: incompleteResults,
    repository_selection: repositorySelection,
    total_count: totalCount
  } = response.data;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;

  // After stripping the metadata, the single remaining key holds the items.
  const namespaceKey = Object.keys(response.data)[0];
  response.data = response.data[namespaceKey];

  if (incompleteResults !== undefined) {
    response.data.incomplete_results = incompleteResults;
  }

  if (repositorySelection !== undefined) {
    response.data.repository_selection = repositorySelection;
  }

  response.data.total_count = totalCount;
  return response;
}
| |
// Returns an object implementing the async-iterator protocol that walks a
// paginated endpoint page by page, following the `Link: rel="next"` header
// until it is absent. `url` is mutable closure state shared across next()
// calls.
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      next() {
        // `url` becomes undefined once the last page's Link header lacks a
        // "next" relation, which terminates the iteration.
        if (!url) {
          return Promise.resolve({
            done: true
          });
        }

        return requestMethod({
          method,
          url,
          headers
        }).then(normalizePaginatedListResponse).then(response => {
          // `response.headers.link` format:
          // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
          // sets `url` to undefined if "next" URL is not present or `link` header is not set
          url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
          return {
            value: response
          };
        });
      }

    })
  };
}
| |
// Collects every page for a route into one flat array. When `parameters` is
// a function it is treated as the map callback (argument-shuffle overload).
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }
  const asyncIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], asyncIterator, mapFn);
}
| |
// Drains `iterator`, concatenating each page's data into `results`.
// `mapFn`, when given, transforms each page and receives a `done` callback
// that stops pagination after the current page.
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    if (result.done) {
      return results;
    }

    let earlyExit = false;
    const done = () => {
      earlyExit = true;
    };

    const pageItems = mapFn ? mapFn(result.value, done) : result.value.data;
    results = results.concat(pageItems);

    return earlyExit ? results : gather(octokit, results, iterator, mapFn);
  });
}
| |
| /** |
| * @param octokit Octokit instance |
| * @param options Options passed to Octokit constructor |
| */ |
| |
/**
 * Octokit plugin factory (@octokit/plugin-paginate-rest): attaches a
 * `paginate` method, carrying `paginate.iterator`, bound to the instance.
 * @param octokit Octokit instance
 */
function paginateRest(octokit) {
  const boundPaginate = paginate.bind(null, octokit);
  boundPaginate.iterator = iterator.bind(null, octokit);
  return {
    paginate: boundPaginate
  };
}
paginateRest.VERSION = VERSION;

exports.paginateRest = paginateRest;
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 323: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
// is-stream v1: duck-typed checks for Node stream objects. A "stream" is
// any non-null object exposing a pipe() function; the refinements probe the
// internal _write/_read machinery that real stream instances carry.
var isStream = module.exports = function (stream) {
	return stream !== null &&
		typeof stream === 'object' &&
		typeof stream.pipe === 'function';
};

// Writable: has _write/_writableState and is not explicitly non-writable.
isStream.writable = function (stream) {
	return isStream(stream) &&
		stream.writable !== false &&
		typeof stream._write === 'function' &&
		typeof stream._writableState === 'object';
};

// Readable: has _read/_readableState and is not explicitly non-readable.
isStream.readable = function (stream) {
	return isStream(stream) &&
		stream.readable !== false &&
		typeof stream._read === 'function' &&
		typeof stream._readableState === 'object';
};

// Duplex: both readable and writable.
isStream.duplex = function (stream) {
	return isStream.writable(stream) && isStream.readable(stream);
};

// Transform: a duplex stream with _transform machinery.
isStream.transform = function (stream) {
	return isStream.duplex(stream) &&
		typeof stream._transform === 'function' &&
		typeof stream._transformState === 'object';
};
| |
| |
| /***/ }), |
| |
| /***/ 346: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
Object.defineProperty(exports, "__esModule", { value: true });
exports.OffsetUnits = exports.Quiet = void 0;
// Compiled TypeScript string enum: accepted values for the `quiet` input.
var Quiet;
(function (Quiet) {
    Quiet["TRUE"] = "true";
    Quiet["FALSE"] = "false";
})(Quiet = exports.Quiet || (exports.Quiet = {}));
// Units accepted by the schedule `offset` input (hours / days / months).
var OffsetUnits;
(function (OffsetUnits) {
    OffsetUnits["HOUR"] = "H";
    OffsetUnits["DAY"] = "d";
    OffsetUnits["MONTH"] = "m";
})(OffsetUnits = exports.OffsetUnits || (exports.OffsetUnits = {}));
| |
| |
| /***/ }), |
| |
| /***/ 357: |
| /***/ (function(module) { |
| |
// Webpack external: re-export Node's built-in `assert` module as id 357.
module.exports = require("assert");
| |
| /***/ }), |
| |
| /***/ 385: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Unwraps a transpiled ES-module namespace: returns `ex.default` when `ex`
// is an object carrying a `default` export, otherwise `ex` itself.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
| |
| var isPlainObject = _interopDefault(__webpack_require__(626)); |
| var universalUserAgent = __webpack_require__(796); |
| |
// Returns a copy of `object` with every key lowercased ({} for falsy input).
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }

  const result = {};
  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }
  return result;
}
| |
// Recursively merges `options` over `defaults` without mutating either.
// Plain-object values merge key-by-key (when the key already exists in
// defaults); every other value simply overwrites.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);
  for (const key of Object.keys(options)) {
    const value = options[key];
    if (isPlainObject(value) && key in defaults) {
      result[key] = mergeDeep(defaults[key], value);
    } else {
      Object.assign(result, {
        [key]: value
      });
    }
  }
  return result;
}
| |
// Merges a route (either a "METHOD /path" string or an options object) with
// the instance defaults into one normalized request-options object.
function merge(defaults, route, options) {
  if (typeof route === "string") {
    // Split "GET /path" into method + url; a bare string is treated as url.
    let [method, url] = route.split(" ");
    options = Object.assign(url ? {
      method,
      url
    } : {
      url: method
    }, options);
  } else {
    options = Object.assign({}, route);
  } // lowercase header names before merging with defaults to avoid duplicates


  options.headers = lowercaseKeys(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten

  // NOTE(review): assumes `defaults.mediaType.previews` exists whenever
  // `defaults` is truthy — holds for octokit-constructed defaults; confirm
  // before calling with hand-built defaults.
  if (defaults && defaults.mediaType.previews.length) {
    mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
  }

  // Preview names are stored without the "-preview" suffix.
  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
  return mergedOptions;
}
| |
// Appends `parameters` to `url` as a query string, joining with "&" when
// the URL already carries a query. Values for "q" keep their "+" separators
// unencoded (GitHub search-syntax convention).
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);

  if (names.length === 0) {
    return url;
  }

  const separator = url.includes("?") ? "&" : "?";
  const pairs = names.map(name => {
    if (name === "q") {
      const encodedTerms = parameters.q.split("+").map(encodeURIComponent);
      return "q=" + encodedTerms.join("+");
    }

    return `${name}=${encodeURIComponent(parameters[name])}`;
  });
  return url + separator + pairs.join("&");
}
| |
// Matches {placeholder} expressions in URL templates.
const urlVariableRegex = /\{[^}]+\}/g;

// Strips the braces/operator characters around a template expression and
// splits the comma-separated variable names.
function removeNonChars(variableName) {
  const trimmed = variableName.replace(/^\W+|\W+$/g, "");
  return trimmed.split(/,/);
}
| |
// Lists every variable name used by the URL template, flattened in order
// of appearance (empty array when the template has no placeholders).
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);

  if (!matches) {
    return [];
  }

  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}
| |
// Shallow-copies `object` minus the listed keys.
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
| |
| // Based on https://github.com/bramstein/url-template, licensed under BSD |
| // TODO: create separate package. |
| // |
| // Copyright (c) 2012-2014, Bram Stein |
| // All rights reserved. |
| // Redistribution and use in source and binary forms, with or without |
| // modification, are permitted provided that the following conditions |
| // are met: |
| // 1. Redistributions of source code must retain the above copyright |
| // notice, this list of conditions and the following disclaimer. |
| // 2. Redistributions in binary form must reproduce the above copyright |
| // notice, this list of conditions and the following disclaimer in the |
| // documentation and/or other materials provided with the distribution. |
| // 3. The name of the author may not be used to endorse or promote products |
| // derived from this software without specific prior written permission. |
| // THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED |
| // WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
| // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
| // EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, |
| // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, |
| // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
| // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING |
| // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, |
| // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| |
| /* istanbul ignore file */ |
/**
 * Percent-encode a string while leaving RFC 3986 reserved characters,
 * square brackets, and pre-existing percent-escapes (e.g. "%2F") untouched.
 */
function encodeReserved(str) {
  const chunks = str.split(/(%[0-9A-Fa-f]{2})/g);
  return chunks
    .map((chunk) => {
      // chunks that already are percent-escapes pass through unchanged
      if (/%[0-9A-Fa-f]/.test(chunk)) {
        return chunk;
      }
      return encodeURI(chunk).replace(/%5B/g, "[").replace(/%5D/g, "]");
    })
    .join("");
}
| |
/**
 * Strict RFC 3986 percent-encoding: like encodeURIComponent, but also
 * escapes !, ', (, ), and *.
 */
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, (c) => {
    return "%" + c.charCodeAt(0).toString(16).toUpperCase();
  });
}
| |
/**
 * Encode a single template value according to its operator: "+" and "#"
 * allow reserved characters, everything else is fully percent-escaped.
 * When `key` is non-empty, render a "key=value" pair.
 */
function encodeValue(operator, value, key) {
  const allowReserved = operator === "+" || operator === "#";
  const encoded = allowReserved ? encodeReserved(value) : encodeUnreserved(value);
  return key ? encodeUnreserved(key) + "=" + encoded : encoded;
}
| |
// A value counts as "defined" unless it is null or undefined.
function isDefined(value) {
  return !(value === undefined || value === null);
}
| |
// Operators whose expansions are rendered as "key=value" pairs.
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
| |
/**
 * Expand a single RFC 6570 variable into a list of encoded string fragments.
 *
 * @param {object} context  - values keyed by variable name
 * @param {string} operator - expression operator ("", "+", "#", ".", "/", ";", "?", "&")
 * @param {string} key      - the variable name
 * @param {string} modifier - prefix length (e.g. "3") or "*" (explode); may be undefined
 * @returns {string[]} encoded fragments to be joined by the caller
 */
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];

  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();

      // ":n" prefix modifier truncates the value to n characters
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }

      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      // composite values: arrays and objects
      if (modifier === "*") {
        // explode: each item / entry becomes its own fragment
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // non-explode: items / entries are comma-joined into one fragment
        const tmp = [];

        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }

        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // undefined / empty values: ";" renders a bare key, "&" and "?" render
    // "key=", other operators render an empty fragment for the empty string
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }

  return result;
}
| |
/**
 * Wrap a URL template in an object exposing `expand(context)`,
 * mirroring the url-template package's API.
 */
function parseUrl(template) {
  const expandTemplate = expand.bind(null, template);
  return { expand: expandTemplate };
}
| |
/**
 * Expand an RFC 6570 URL template against `context`.
 * Literal (non-expression) text is percent-encoded via encodeReserved.
 */
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];

      // a leading operator character changes encoding and separators
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }

      // each variable is "name", "name:3" (prefix) or "name*" (explode)
      expression.split(/,/g).forEach(function (variable) {
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });

      if (operator && operator !== "+") {
        var separator = ",";

        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }

        // the whole expansion is prefixed with the operator itself
        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      return encodeReserved(literal);
    }
  });
}
| |
/**
 * Turn merged endpoint options into a request object of the shape
 * { method, url, headers, [body], [request] }.
 */
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);

  // relative URLs are resolved against baseUrl
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }

  // parameters consumed by the URL template (plus baseUrl) are excluded
  // from the remaining query/body parameters
  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequset = /application\/octet-stream/i.test(headers.accept);

  if (!isBinaryRequset) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }

    if (options.mediaType.previews.length) {
      // merge previews already present in the accept header with the
      // requested ones, normalizing each to a "-preview" media type
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters


  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set


  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string


  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present


  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
| |
/**
 * Merge defaults, a route shorthand, and options, then parse the result
 * into a request object ({ method, url, headers, [body], [request] }).
 */
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
| |
/**
 * Build a new `endpoint` function with `newDefaults` layered on top of
 * `oldDefaults`; the returned function also exposes DEFAULTS, defaults,
 * merge, and parse.
 */
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const boundEndpoint = endpointWithDefaults.bind(null, DEFAULTS);
  boundEndpoint.DEFAULTS = DEFAULTS;
  boundEndpoint.defaults = withDefaults.bind(null, DEFAULTS);
  boundEndpoint.merge = merge.bind(null, DEFAULTS);
  boundEndpoint.parse = parse;
  return boundEndpoint;
}
| |
const VERSION = "6.0.3";

const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.

// Baseline options every endpoint call starts from.
const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};

// The public `endpoint` function, pre-loaded with the defaults above.
const endpoint = withDefaults(null, DEFAULTS);

exports.endpoint = endpoint;
//# sourceMappingURL=index.js.map
| |
| |
| /***/ }), |
| |
| /***/ 389: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const fs = __webpack_require__(747); |
| const shebangCommand = __webpack_require__(866); |
| |
/**
 * Read the shebang interpreter command (e.g. "node") from the start of an
 * executable file.
 * @param {string} command - path of the file to inspect
 * @returns {?string} the shebang command, or null if absent/unreadable
 */
function readShebang(command) {
  // Read the first 150 bytes from the file
  const size = 150;
  let buffer;

  if (Buffer.alloc) {
    // Node.js v4.5+ / v5.10+
    buffer = Buffer.alloc(size);
  } else {
    // Old Node.js API
    buffer = new Buffer(size);
    buffer.fill(0); // zero-fill
  }

  let fd;

  try {
    fd = fs.openSync(command, 'r');
    fs.readSync(fd, buffer, 0, size, 0);
  } catch (e) { /* Empty */ } finally {
    // Always release the descriptor: the previous version skipped
    // closeSync (leaking fd) whenever readSync threw.
    if (fd !== undefined) {
      try {
        fs.closeSync(fd);
      } catch (e) { /* Empty */ }
    }
  }

  // Attempt to extract shebang (null is returned if not a shebang)
  return shebangCommand(buffer.toString());
}

module.exports = readShebang;
| |
| |
| /***/ }), |
| |
| /***/ 413: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
// Indirection kept by webpack: module 413 simply re-exports module 141.
module.exports = __webpack_require__(141);
| |
| |
| /***/ }), |
| |
| /***/ 427: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
// Older versions of Node.js might not have `util.getSystemErrorName()`.
// In that case, fall back to a deprecated internal.
| const util = __webpack_require__(669); |
| |
let uv;

if (typeof util.getSystemErrorName === 'function') {
  // Node.js >= 9.7: use the public API directly.
  module.exports = util.getSystemErrorName;
} else {
  try {
    // Fall back to the deprecated internal libuv binding.
    uv = process.binding('uv');

    if (typeof uv.errname !== 'function') {
      throw new TypeError('uv.errname is not a function');
    }
  } catch (err) {
    // binding unavailable: errname() below degrades to a generic message
    console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
    uv = null;
  }

  module.exports = code => errname(uv, code);
}

// Used for testing the fallback behavior
module.exports.__test__ = errname;
| |
/**
 * Resolve a negative libuv error code to its name, falling back to a
 * generic message when the `uv` binding is unavailable.
 * Throws when `code` is not negative (system error codes are < 0).
 */
function errname(uv, code) {
  if (uv) {
    return uv.errname(code);
  }

  // note: `!(code < 0)` (not `code >= 0`) so NaN also rejects
  if (!(code < 0)) {
    throw new Error('err >= 0');
  }

  return `Unknown system error ${code}`;
}
| |
| |
| |
| /***/ }), |
| |
| /***/ 431: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// TypeScript helper: wrap a CommonJS module so it looks like an ES module
// namespace object (copies own enumerable properties, sets `default` to
// the module itself). Real ES modules pass through unchanged.
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
  result["default"] = mod;
  return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
| const os = __importStar(__webpack_require__(87)); |
| /** |
| * Commands |
| * |
| * Command Format: |
| * ::name key=value,key=value::message |
| * |
| * Examples: |
| * ::warning::This is the message |
| * ::set-env name=MY_VAR::some value |
| */ |
/**
 * Write a workflow command to stdout for the Actions runner to interpret.
 * @param command    command name (e.g. "warning", "set-env")
 * @param properties key/value annotations rendered after the name
 * @param message    the command payload
 */
function issueCommand(command, properties, message) {
  const cmd = new Command(command, properties, message);
  process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
// Shorthand for a command with no properties.
function issue(name, message = '') {
  issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';

/**
 * A single workflow command in the form:
 *   ::name key=value,key=value::message
 */
class Command {
  constructor(command, properties, message) {
    if (!command) {
      command = 'missing.command';
    }
    this.command = command;
    this.properties = properties;
    this.message = message;
  }
  toString() {
    let cmdStr = CMD_STRING + this.command;
    if (this.properties && Object.keys(this.properties).length > 0) {
      cmdStr += ' ';
      let first = true;
      for (const key in this.properties) {
        if (this.properties.hasOwnProperty(key)) {
          const val = this.properties[key];
          // falsy property values are skipped entirely
          if (val) {
            if (first) {
              first = false;
            }
            else {
              cmdStr += ',';
            }
            // escape delimiter characters so the runner can parse the command
            cmdStr += `${key}=${escapeProperty(val)}`;
          }
        }
      }
    }
    cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
    return cmdStr;
  }
}
| /** |
| * Sanitizes an input into a string so it can be passed into issueCommand safely |
| * @param input input to sanitize into a string |
| */ |
function toCommandValue(input) {
  // nullish → empty string so the command still renders
  if (input === null || input === undefined) {
    return '';
  }
  // strings (and String objects) pass through untouched
  if (typeof input === 'string' || input instanceof String) {
    return input;
  }
  // everything else is serialized as JSON
  return JSON.stringify(input);
}
| exports.toCommandValue = toCommandValue; |
// Escape %, CR and LF for the message portion of a workflow command.
function escapeData(s) {
  const value = toCommandValue(s);
  return value.replace(/%/g, '%25').replace(/\r/g, '%0D').replace(/\n/g, '%0A');
}
// Property values additionally escape ':' and ',' — the command delimiters.
function escapeProperty(s) {
  const value = toCommandValue(s);
  return value
    .replace(/%/g, '%25')
    .replace(/\r/g, '%0D')
    .replace(/\n/g, '%0A')
    .replace(/:/g, '%3A')
    .replace(/,/g, '%2C');
}
| //# sourceMappingURL=command.js.map |
| |
| /***/ }), |
| |
| /***/ 448: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| var universalUserAgent = __webpack_require__(796); |
| var beforeAfterHook = __webpack_require__(523); |
| var request = __webpack_require__(753); |
| var graphql = __webpack_require__(898); |
| var authToken = __webpack_require__(813); |
| |
/**
 * Babel helper: set `key` on `obj` to `value`. Keys that already exist are
 * (re)defined via Object.defineProperty with enumerable/configurable/writable
 * set; new keys get a plain assignment. Returns `obj`.
 */
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value,
      enumerable: true,
      configurable: true,
      writable: true
    });
    return obj;
  }

  obj[key] = value;
  return obj;
}
| |
/**
 * Babel helper: own string keys of `object`, followed by its own symbol
 * keys (optionally restricted to enumerable symbols).
 */
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);

  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter((sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable);
    }
    keys.push(...symbols);
  }

  return keys;
}
| |
// Babel helper for object spread ({...a, ...b}): odd-position arguments are
// copied as own enumerable entries, even-position arguments preserve full
// property descriptors where supported.
function _objectSpread2(target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = arguments[i] != null ? arguments[i] : {};

    if (i % 2) {
      // odd positions: copy own enumerable entries (string + symbol keys)
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      // even positions: keep getters/setters and flags intact
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }

  return target;
}
| |
const VERSION = "2.5.4";

/**
 * The extensible @octokit/core GitHub API client: wires together request
 * defaults, before/after hooks, authentication, GraphQL and logging.
 */
class Octokit {
  constructor(options = {}) {
    const hook = new beforeAfterHook.Collection();
    const requestDefaults = {
      baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options.request, {
        hook: hook.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    }; // prepend default user agent with `options.userAgent` if set

    requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");

    if (options.baseUrl) {
      requestDefaults.baseUrl = options.baseUrl;
    }

    if (options.previews) {
      requestDefaults.mediaType.previews = options.previews;
    }

    if (options.timeZone) {
      requestDefaults.headers["time-zone"] = options.timeZone;
    }

    this.request = request.request.defaults(requestDefaults);
    // GraphQL endpoint: a REST baseUrl ending in /api/v3 (GitHub Enterprise)
    // maps to /api for GraphQL
    this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, {
      baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api")
    }));
    this.log = Object.assign({
      debug: () => {},
      info: () => {},
      warn: console.warn.bind(console),
      error: console.error.bind(console)
    }, options.log);
    this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
    // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
    // (2) If only `options.auth` is set, use the default token authentication strategy.
    // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
    // TODO: type `options.auth` based on `options.authStrategy`.

    if (!options.authStrategy) {
      if (!options.auth) {
        // (1)
        this.auth = async () => ({
          type: "unauthenticated"
        });
      } else {
        // (2)
        const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯

        hook.wrap("request", auth.hook);
        this.auth = auth;
      }
    } else {
      const auth = options.authStrategy(Object.assign({
        request: this.request
      }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯

      hook.wrap("request", auth.hook);
      this.auth = auth;
    } // apply plugins
    // https://stackoverflow.com/a/16345172


    const classConstructor = this.constructor;
    classConstructor.plugins.forEach(plugin => {
      Object.assign(this, plugin(this, options));
    });
  }

  // Create a subclass with `defaults` pre-merged into constructor options.
  // A user agent passed at construction time is prepended to the default one.
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options = args[0] || {};
        super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
          userAgent: `${options.userAgent} ${defaults.userAgent}`
        } : null));
      }

    };
    return OctokitWithDefaults;
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */


  static plugin(p1, ...p2) {
    var _a;

    if (p1 instanceof Array) {
      console.warn(["Passing an array of plugins to Octokit.plugin() has been deprecated.", "Instead of:", " Octokit.plugin([plugin1, plugin2, ...])", "Use:", " Octokit.plugin(plugin1, plugin2, ...)"].join("\n"));
    }

    const currentPlugins = this.plugins;
    let newPlugins = [...(p1 instanceof Array ? p1 : [p1]), ...p2];
    // subclass carrying the deduplicated union of current and new plugins
    const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
    return NewOctokit;
  }

}
Octokit.VERSION = VERSION;
Octokit.plugins = [];

exports.Octokit = Octokit;
//# sourceMappingURL=index.js.map
| |
| |
| /***/ }), |
| |
| /***/ 453: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var once = __webpack_require__(969) |
| var eos = __webpack_require__(9) |
| var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes |
| |
var noop = function () {}
// 0.x node is the only line that needs special fs-stream handling
var ancient = /^v?\.0/.test(process.version)

// callable check
var isFn = function (fn) {
  return typeof fn === 'function'
}

// fs streams on ancient node must be close()d instead of destroy()ed;
// on modern node this is always false
var isFS = function (stream) {
  if (!ancient) return false // only 0.x needs the special fs handling
  if (!fs) return false // browser build
  var isFsStream =
    stream instanceof (fs.ReadStream || noop) ||
    stream instanceof (fs.WriteStream || noop)
  return isFsStream && isFn(stream.close)
}

// http.ClientRequest objects expose setHeader and a callable abort
var isRequest = function (stream) {
  return stream.setHeader && isFn(stream.abort)
}
| |
// Track close/end-of-stream on `stream` and return a teardown function.
// The teardown destroys the stream (close for fs streams, abort for http
// requests, destroy otherwise) unless it has already finished.
var destroyer = function (stream, reading, writing, callback) {
  callback = once(callback)

  var closed = false
  stream.on('close', function () {
    closed = true
  })

  // end-of-stream: report this stream's success or failure exactly once
  eos(stream, {readable: reading, writable: writing}, function (err) {
    if (err) return callback(err)
    closed = true
    callback()
  })

  var destroyed = false
  return function (err) {
    if (closed) return
    if (destroyed) return
    destroyed = true

    if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
    if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want

    if (isFn(stream.destroy)) return stream.destroy()

    callback(err || new Error('stream was destroyed'))
  }
}
| |
// invoke a queued destroy function
var call = function (fn) { fn() }

// reducer step: pipe one stream into the next, yielding the destination
var pipe = function (from, to) { return from.pipe(to) }
| |
// pump(stream1, stream2, ..., [callback])
// Pipe the streams together; when any of them errors or closes prematurely,
// destroy all of them and invoke `callback` with the first error (if any).
var pump = function () {
  var streams = Array.prototype.slice.call(arguments)
  // trailing function argument, if any, is the completion callback
  var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop

  if (Array.isArray(streams[0])) streams = streams[0]
  if (streams.length < 2) throw new Error('pump requires two streams per minimum')

  var error
  var destroys = streams.map(function (stream, i) {
    // every stream but the last is read from; every stream but the first is written to
    var reading = i < streams.length - 1
    var writing = i > 0
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err
      if (err) destroys.forEach(call)
      if (reading) return
      // the final (write-only) stream finished: tear down and report
      destroys.forEach(call)
      callback(error)
    })
  })

  return streams.reduce(pipe)
}

module.exports = pump
| |
| |
| /***/ }), |
| |
| /***/ 454: |
| /***/ (function(module, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
/**
 * Unwrap a transpiled ES-module default export: if `ex` is an object with a
 * `default` property, return that property; otherwise return `ex` unchanged.
 */
function _interopDefault(ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
| |
| var Stream = _interopDefault(__webpack_require__(794)); |
| var http = _interopDefault(__webpack_require__(605)); |
| var Url = _interopDefault(__webpack_require__(835)); |
| var https = _interopDefault(__webpack_require__(211)); |
| var zlib = _interopDefault(__webpack_require__(761)); |
| |
| // Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js |
| |
| // fix for "Readable" isn't a named export issue |
| const Readable = Stream.Readable; |
| |
// Private symbols holding the Blob's backing buffer and content type.
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');

/**
 * Minimal Blob implementation backed by a single Node Buffer
 * (based on jsdom's blob.js, see link above).
 */
class Blob {
  constructor() {
    this[TYPE] = '';

    const blobParts = arguments[0];
    const options = arguments[1];

    const buffers = [];
    let size = 0;

    if (blobParts) {
      const a = blobParts;
      const length = Number(a.length);
      for (let i = 0; i < length; i++) {
        const element = a[i];
        let buffer;
        // normalize every part to a Buffer
        if (element instanceof Buffer) {
          buffer = element;
        } else if (ArrayBuffer.isView(element)) {
          buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
        } else if (element instanceof ArrayBuffer) {
          buffer = Buffer.from(element);
        } else if (element instanceof Blob) {
          buffer = element[BUFFER];
        } else {
          buffer = Buffer.from(typeof element === 'string' ? element : String(element));
        }
        size += buffer.length;
        buffers.push(buffer);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    // only printable-ASCII content types are accepted, lowercased
    let type = options && options.type !== undefined && String(options.type).toLowerCase();
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }
  get size() {
    return this[BUFFER].length;
  }
  get type() {
    return this[TYPE];
  }
  text() {
    return Promise.resolve(this[BUFFER].toString());
  }
  arrayBuffer() {
    // slice out exactly this Blob's region of the underlying ArrayBuffer
    const buf = this[BUFFER];
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }
  stream() {
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }
  toString() {
    return '[object Blob]';
  }
  slice() {
    const size = this.size;

    // negative indices count from the end, as in Blob.prototype.slice
    const start = arguments[0];
    const end = arguments[1];
    let relativeStart, relativeEnd;
    if (start === undefined) {
      relativeStart = 0;
    } else if (start < 0) {
      relativeStart = Math.max(size + start, 0);
    } else {
      relativeStart = Math.min(start, size);
    }
    if (end === undefined) {
      relativeEnd = size;
    } else if (end < 0) {
      relativeEnd = Math.max(size + end, 0);
    } else {
      relativeEnd = Math.min(end, size);
    }
    const span = Math.max(relativeEnd - relativeStart, 0);

    const buffer = this[BUFFER];
    const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
    const blob = new Blob([], { type: arguments[2] });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}

// match browser Blob semantics: these members are enumerable
Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
});

Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
  value: 'Blob',
  writable: false,
  enumerable: false,
  configurable: true
});
| |
| /** |
| * fetch-error.js |
| * |
| * FetchError interface for operational errors |
| */ |
| |
| /** |
| * Create FetchError instance |
| * |
| * @param String message Error message for human |
| * @param String type Error type for machine |
| * @param String systemError For Node.js system error |
| * @return FetchError |
| */ |
| function FetchError(message, type, systemError) { |
| Error.call(this, message); |
| |
| this.message = message; |
| this.type = type; |
| |
| // when err.type is `system`, err.code contains system error code |
| if (systemError) { |
| this.code = this.errno = systemError.code; |
| } |
| |
| // hide custom error implementation details from end-users |
| Error.captureStackTrace(this, this.constructor); |
| } |
| |
| FetchError.prototype = Object.create(Error.prototype); |
| FetchError.prototype.constructor = FetchError; |
| FetchError.prototype.name = 'FetchError'; |
| |
// optional dependency: the `encoding` package supplies charset conversion
// for textConverted(); when it is absent, `convert` stays undefined
let convert;
try {
  convert = __webpack_require__(18).convert;
} catch (e) {}

// private symbol holding each Body's internal state
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
| |
| /** |
| * Body mixin |
| * |
| * Ref: https://fetch.spec.whatwg.org/#body |
| * |
| * @param Stream body Readable stream |
| * @param Object opts Response options |
| * @return Void |
| */ |
| function Body(body) { |
| var _this = this; |
| |
| var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, |
| _ref$size = _ref.size; |
| |
| let size = _ref$size === undefined ? 0 : _ref$size; |
| var _ref$timeout = _ref.timeout; |
| let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; |
| |
| if (body == null) { |
| // body is undefined or null |
| body = null; |
| } else if (isURLSearchParams(body)) { |
| // body is a URLSearchParams |
| body = Buffer.from(body.toString()); |
| } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { |
| // body is ArrayBuffer |
| body = Buffer.from(body); |
| } else if (ArrayBuffer.isView(body)) { |
| // body is ArrayBufferView |
| body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); |
| } else if (body instanceof Stream) ; else { |
| // none of the above |
| // coerce to string then buffer |
| body = Buffer.from(String(body)); |
| } |
| this[INTERNALS] = { |
| body, |
| disturbed: false, |
| error: null |
| }; |
| this.size = size; |
| this.timeout = timeout; |
| |
| if (body instanceof Stream) { |
| body.on('error', function (err) { |
| const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); |
| _this[INTERNALS].error = error; |
| }); |
| } |
| } |
| |
Body.prototype = {
  // the normalized body value (null / Buffer / Blob / Stream)
  get body() {
    return this[INTERNALS].body;
  },

  // true once the body has been consumed
  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },

  /**
   * Decode response as ArrayBuffer
   *
   * @return  Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      // slice out exactly the used region of the underlying ArrayBuffer
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },

  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      return Object.assign(
      // Prevent copying
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },

  /**
   * Decode response as json
   *
   * @return  Promise
   */
  json() {
    var _this2 = this;

    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        // malformed JSON rejects with a typed FetchError instead of throwing
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },

  /**
   * Decode response as text
   *
   * @return  Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },

  /**
   * Decode response as buffer (non-spec api)
   *
   * @return  Promise
   */
  buffer() {
    return consumeBody.call(this);
  },

  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return  Promise
   */
  textConverted() {
    var _this3 = this;

    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};

// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
  body: { enumerable: true },
  bodyUsed: { enumerable: true },
  arrayBuffer: { enumerable: true },
  blob: { enumerable: true },
  json: { enumerable: true },
  text: { enumerable: true }
});

// Copy Body's methods and accessors onto another prototype
// (Request/Response) without overwriting anything it already defines.
Body.mixIn = function (proto) {
  for (const name of Object.getOwnPropertyNames(Body.prototype)) {
    // istanbul ignore else: future proof
    if (!(name in proto)) {
      const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
      Object.defineProperty(proto, name, desc);
    }
  }
};
| |
| /** |
| * Consume and convert an entire Body to a Buffer. |
| * |
| * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body |
| * |
| * @return Promise |
| */ |
| function consumeBody() { |
| var _this4 = this; |
| |
| if (this[INTERNALS].disturbed) { |
| return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); |
| } |
| |
| this[INTERNALS].disturbed = true; |
| |
| if (this[INTERNALS].error) { |
| return Body.Promise.reject(this[INTERNALS].error); |
| } |
| |
| let body = this.body; |
| |
| // body is null |
| if (body === null) { |
| return Body.Promise.resolve(Buffer.alloc(0)); |
| } |
| |
| // body is blob |
| if (isBlob(body)) { |
| body = body.stream(); |
| } |
| |
| // body is buffer |
| if (Buffer.isBuffer(body)) { |
| return Body.Promise.resolve(body); |
| } |
| |
| // istanbul ignore if: should never happen |
| if (!(body instanceof Stream)) { |
| return Body.Promise.resolve(Buffer.alloc(0)); |
| } |
| |
| // body is stream |
| // get ready to actually consume the body |
| let accum = []; |
| let accumBytes = 0; |
| let abort = false; |
| |
| return new Body.Promise(function (resolve, reject) { |
| let resTimeout; |
| |
| // allow timeout on slow response body |
| if (_this4.timeout) { |
| resTimeout = setTimeout(function () { |
| abort = true; |
| reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); |
| }, _this4.timeout); |
| } |
| |
| // handle stream errors |
| body.on('error', function (err) { |
| if (err.name === 'AbortError') { |
| // if the request was aborted, reject with this Error |
| abort = true; |
| reject(err); |
| } else { |
| // other errors, such as incorrect content-encoding |
| reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); |
| } |
| }); |
| |
| body.on('data', function (chunk) { |
| if (abort || chunk === null) { |
| return; |
| } |
| |
| if (_this4.size && accumBytes + chunk.length > _this4.size) { |
| abort = true; |
| reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); |
| return; |
| } |
| |
| accumBytes += chunk.length; |
| accum.push(chunk); |
| }); |
| |
| body.on('end', function () { |
| if (abort) { |
| return; |
| } |
| |
| clearTimeout(resTimeout); |
| |
| try { |
| resolve(Buffer.concat(accum, accumBytes)); |
| } catch (err) { |
| // handle streams that have accumulated too much data (issue #414) |
| reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); |
| } |
| }); |
| }); |
| } |
| |
| /** |
| * Detect buffer encoding and convert to target encoding |
| * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding |
| * |
| * @param Buffer buffer Incoming buffer |
| * @param String encoding Target encoding |
| * @return String |
| */ |
| function convertBody(buffer, headers) { |
| if (typeof convert !== 'function') { |
| throw new Error('The package `encoding` must be installed to use the textConverted() function'); |
| } |
| |
| const ct = headers.get('content-type'); |
| let charset = 'utf-8'; |
| let res, str; |
| |
| // header |
| if (ct) { |
| res = /charset=([^;]*)/i.exec(ct); |
| } |
| |
| // no charset in content type, peek at response body for at most 1024 bytes |
| str = buffer.slice(0, 1024).toString(); |
| |
| // html5 |
| if (!res && str) { |
| res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str); |
| } |
| |
| // html4 |
| if (!res && str) { |
| res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str); |
| |
| if (res) { |
| res = /charset=(.*)/i.exec(res.pop()); |
| } |
| } |
| |
| // xml |
| if (!res && str) { |
| res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); |
| } |
| |
| // found charset |
| if (res) { |
| charset = res.pop(); |
| |
| // prevent decode issues when sites use incorrect encoding |
| // ref: https://hsivonen.fi/encoding-menu/ |
| if (charset === 'gb2312' || charset === 'gbk') { |
| charset = 'gb18030'; |
| } |
| } |
| |
| // turn raw buffers into a single utf-8 buffer |
| return convert(buffer, 'UTF-8', charset).toString(); |
| } |
| |
| /** |
| * Detect a URLSearchParams object |
| * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 |
| * |
| * @param Object obj Object to detect by type or brand |
| * @return String |
| */ |
| function isURLSearchParams(obj) { |
| // Duck-typing as a necessary condition. |
| if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { |
| return false; |
| } |
| |
| // Brand-checking and more duck-typing as optional condition. |
| return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; |
| } |
| |
| /** |
| * Check if `obj` is a W3C `Blob` object (which `File` inherits from) |
| * @param {*} obj |
| * @return {boolean} |
| */ |
| function isBlob(obj) { |
| return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); |
| } |
| |
| /** |
| * Clone body given Res/Req instance |
| * |
| * @param Mixed instance Response or Request instance |
| * @return Mixed |
| */ |
| function clone(instance) { |
| let p1, p2; |
| let body = instance.body; |
| |
| // don't allow cloning a used body |
| if (instance.bodyUsed) { |
| throw new Error('cannot clone body after it is used'); |
| } |
| |
| // check that body is a stream and not form-data object |
| // note: we can't clone the form-data object without having it as a dependency |
| if (body instanceof Stream && typeof body.getBoundary !== 'function') { |
| // tee instance body |
| p1 = new PassThrough(); |
| p2 = new PassThrough(); |
| body.pipe(p1); |
| body.pipe(p2); |
| // set instance body to teed body and return the other teed body |
| instance[INTERNALS].body = p1; |
| body = p2; |
| } |
| |
| return body; |
| } |
| |
| /** |
| * Performs the operation "extract a `Content-Type` value from |object|" as |
| * specified in the specification: |
| * https://fetch.spec.whatwg.org/#concept-bodyinit-extract |
| * |
| * This function assumes that instance.body is present. |
| * |
| * @param Mixed instance Any options.body input |
| */ |
| function extractContentType(body) { |
| if (body === null) { |
| // body is null |
| return null; |
| } else if (typeof body === 'string') { |
| // body is string |
| return 'text/plain;charset=UTF-8'; |
| } else if (isURLSearchParams(body)) { |
| // body is a URLSearchParams |
| return 'application/x-www-form-urlencoded;charset=UTF-8'; |
| } else if (isBlob(body)) { |
| // body is blob |
| return body.type || null; |
| } else if (Buffer.isBuffer(body)) { |
| // body is buffer |
| return null; |
| } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { |
| // body is ArrayBuffer |
| return null; |
| } else if (ArrayBuffer.isView(body)) { |
| // body is ArrayBufferView |
| return null; |
| } else if (typeof body.getBoundary === 'function') { |
| // detect form data input from form-data module |
| return `multipart/form-data;boundary=${body.getBoundary()}`; |
| } else if (body instanceof Stream) { |
| // body is stream |
| // can't really do much about this |
| return null; |
| } else { |
| // Body constructor defaults other things to string |
| return 'text/plain;charset=UTF-8'; |
| } |
| } |
| |
| /** |
| * The Fetch Standard treats this as if "total bytes" is a property on the body. |
| * For us, we have to explicitly get it with a function. |
| * |
| * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes |
| * |
| * @param Body instance Instance of Body |
| * @return Number? Number of bytes, or null if not possible |
| */ |
| function getTotalBytes(instance) { |
| const body = instance.body; |
| |
| |
| if (body === null) { |
| // body is null |
| return 0; |
| } else if (isBlob(body)) { |
| return body.size; |
| } else if (Buffer.isBuffer(body)) { |
| // body is buffer |
| return body.length; |
| } else if (body && typeof body.getLengthSync === 'function') { |
| // detect form data input from form-data module |
| if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x |
| body.hasKnownLength && body.hasKnownLength()) { |
| // 2.x |
| return body.getLengthSync(); |
| } |
| return null; |
| } else { |
| // body is stream |
| return null; |
| } |
| } |
| |
| /** |
| * Write a Body to a Node.js WritableStream (e.g. http.Request) object. |
| * |
| * @param Body instance Instance of Body |
| * @return Void |
| */ |
| function writeToStream(dest, instance) { |
| const body = instance.body; |
| |
| |
| if (body === null) { |
| // body is null |
| dest.end(); |
| } else if (isBlob(body)) { |
| body.stream().pipe(dest); |
| } else if (Buffer.isBuffer(body)) { |
| // body is buffer |
| dest.write(body); |
| dest.end(); |
| } else { |
| // body is stream |
| body.pipe(dest); |
| } |
| } |
| |
// expose Promise implementation used throughout Body; fetch() rebinds this
// from fetch.Promise so callers can substitute their own implementation
Body.Promise = global.Promise;
| |
| /** |
| * headers.js |
| * |
| * Headers class offers convenient helpers |
| */ |
| |
// Matches any character that is NOT legal in an HTTP header field name.
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// Matches any character that is NOT legal in an HTTP header field value.
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

/**
 * Coerce `name` to a string and throw if it is empty or contains a
 * character that is not allowed in a header name.
 */
function validateName(name) {
	name = `${name}`;
	const illegal = invalidTokenRegex.test(name) || name === '';
	if (illegal) {
		throw new TypeError(`${name} is not a legal HTTP header name`);
	}
}

/**
 * Coerce `value` to a string and throw if it contains a character that is
 * not allowed in a header value.
 */
function validateValue(value) {
	value = `${value}`;
	if (invalidHeaderCharRegex.test(value)) {
		throw new TypeError(`${value} is not a legal HTTP header value`);
	}
}
| |
| /** |
| * Find the key in the map object given a header name. |
| * |
| * Returns undefined if not found. |
| * |
| * @param String name Header name |
| * @return String|Undefined |
| */ |
| function find(map, name) { |
| name = name.toLowerCase(); |
| for (const key in map) { |
| if (key.toLowerCase() === name) { |
| return key; |
| } |
| } |
| return undefined; |
| } |
| |
// Symbol under which each Headers instance stores its raw header map.
const MAP = Symbol('map');
class Headers {
	/**
	 * Headers class
	 *
	 * Accepts another Headers instance, an iterable of [name, value] pairs,
	 * or a plain record of name -> value.
	 *
	 * @param Object headers Response headers
	 * @return Void
	 */
	constructor() {
		let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

		// null-prototype map: header names can never collide with Object.prototype
		this[MAP] = Object.create(null);

		if (init instanceof Headers) {
			// copy every value of every header from the source instance
			const rawHeaders = init.raw();
			const headerNames = Object.keys(rawHeaders);

			for (const headerName of headerNames) {
				for (const value of rawHeaders[headerName]) {
					this.append(headerName, value);
				}
			}

			return;
		}

		// We don't worry about converting prop to ByteString here as append()
		// will handle it.
		if (init == null) ; else if (typeof init === 'object') {
			const method = init[Symbol.iterator];
			if (method != null) {
				if (typeof method !== 'function') {
					throw new TypeError('Header pairs must be iterable');
				}

				// sequence<sequence<ByteString>>
				// Note: per spec we have to first exhaust the lists then process them
				const pairs = [];
				for (const pair of init) {
					if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
						throw new TypeError('Each header pair must be iterable');
					}
					pairs.push(Array.from(pair));
				}

				for (const pair of pairs) {
					if (pair.length !== 2) {
						throw new TypeError('Each header pair must be a name/value tuple');
					}
					this.append(pair[0], pair[1]);
				}
			} else {
				// record<ByteString, ByteString>
				for (const key of Object.keys(init)) {
					const value = init[key];
					this.append(key, value);
				}
			}
		} else {
			throw new TypeError('Provided initializer must be an object');
		}
	}

	/**
	 * Return combined header value given name
	 *
	 * Multiple values for the same name are joined with ', '.
	 *
	 * @param String name Header name
	 * @return Mixed Combined value string, or null when absent
	 */
	get(name) {
		name = `${name}`;
		validateName(name);
		const key = find(this[MAP], name);
		if (key === undefined) {
			return null;
		}

		return this[MAP][key].join(', ');
	}

	/**
	 * Iterate over all headers
	 *
	 * The pair list is re-read after every callback, so a callback may
	 * mutate the headers while iterating.
	 *
	 * @param Function callback Executed for each item with parameters (value, name, thisArg)
	 * @param Boolean thisArg `this` context for callback function
	 * @return Void
	 */
	forEach(callback) {
		let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

		let pairs = getHeaders(this);
		let i = 0;
		while (i < pairs.length) {
			var _pairs$i = pairs[i];
			const name = _pairs$i[0],
			      value = _pairs$i[1];

			callback.call(thisArg, value, name, this);
			pairs = getHeaders(this);
			i++;
		}
	}

	/**
	 * Overwrite header values given name
	 *
	 * Replaces all existing values; keeps the original key's casing if the
	 * name is already present.
	 *
	 * @param String name Header name
	 * @param String value Header value
	 * @return Void
	 */
	set(name, value) {
		name = `${name}`;
		value = `${value}`;
		validateName(name);
		validateValue(value);
		const key = find(this[MAP], name);
		this[MAP][key !== undefined ? key : name] = [value];
	}

	/**
	 * Append a value onto existing header
	 *
	 * @param String name Header name
	 * @param String value Header value
	 * @return Void
	 */
	append(name, value) {
		name = `${name}`;
		value = `${value}`;
		validateName(name);
		validateValue(value);
		const key = find(this[MAP], name);
		if (key !== undefined) {
			this[MAP][key].push(value);
		} else {
			this[MAP][name] = [value];
		}
	}

	/**
	 * Check for header name existence
	 *
	 * @param String name Header name
	 * @return Boolean
	 */
	has(name) {
		name = `${name}`;
		validateName(name);
		return find(this[MAP], name) !== undefined;
	}

	/**
	 * Delete all header values given name
	 *
	 * @param String name Header name
	 * @return Void
	 */
	delete(name) {
		name = `${name}`;
		validateName(name);
		const key = find(this[MAP], name);
		if (key !== undefined) {
			delete this[MAP][key];
		}
	}

	/**
	 * Return raw headers (non-spec api)
	 *
	 * Exposes the live internal map (name -> array of values); mutating the
	 * returned object mutates this instance.
	 *
	 * @return Object
	 */
	raw() {
		return this[MAP];
	}

	/**
	 * Get an iterator on keys.
	 *
	 * @return Iterator
	 */
	keys() {
		return createHeadersIterator(this, 'key');
	}

	/**
	 * Get an iterator on values.
	 *
	 * @return Iterator
	 */
	values() {
		return createHeadersIterator(this, 'value');
	}

	/**
	 * Get an iterator on entries.
	 *
	 * This is the default iterator of the Headers object.
	 *
	 * @return Iterator
	 */
	[Symbol.iterator]() {
		return createHeadersIterator(this, 'key+value');
	}
}
// entries() is an alias for the default iterator
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

// Brand Headers instances for Object.prototype.toString.
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
	value: 'Headers',
	writable: false,
	enumerable: false,
	configurable: true
});

// Mark the public API methods as enumerable on the prototype.
Object.defineProperties(Headers.prototype, {
	get: { enumerable: true },
	forEach: { enumerable: true },
	set: { enumerable: true },
	append: { enumerable: true },
	has: { enumerable: true },
	delete: { enumerable: true },
	keys: { enumerable: true },
	values: { enumerable: true },
	entries: { enumerable: true }
});
| |
/**
 * Produce a sorted snapshot of a Headers object's contents.
 *
 * @param Headers headers
 * @param String kind 'key', 'value' or 'key+value' (default)
 * @return Array Lowercased keys, joined values, or [key, value] pairs
 */
function getHeaders(headers, kind = 'key+value') {
	const sortedKeys = Object.keys(headers[MAP]).sort();

	let project;
	if (kind === 'key') {
		project = function (k) {
			return k.toLowerCase();
		};
	} else if (kind === 'value') {
		project = function (k) {
			return headers[MAP][k].join(', ');
		};
	} else {
		project = function (k) {
			return [k.toLowerCase(), headers[MAP][k].join(', ')];
		};
	}

	return sortedKeys.map(project);
}
| |
// Symbol keying the hidden state (target, kind, index) of a headers iterator.
const INTERNAL = Symbol('internal');
| |
/**
 * Build a spec-shaped iterator over a Headers object.
 *
 * @param Headers target Headers instance to iterate
 * @param String kind 'key', 'value' or 'key+value'
 * @return Iterator
 */
function createHeadersIterator(target, kind) {
	return Object.assign(Object.create(HeadersIteratorPrototype), {
		[INTERNAL]: { target, kind, index: 0 }
	});
}
| |
// Prototype for headers iterators; inherits from %IteratorPrototype% so
// instances behave like built-in iterators.
const HeadersIteratorPrototype = Object.setPrototypeOf({
	next() {
		// istanbul ignore if
		if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
			throw new TypeError('Value of `this` is not a HeadersIterator');
		}

		const { target, kind, index } = this[INTERNAL];

		// snapshot is re-taken on every call, so mutation mid-iteration is visible
		const values = getHeaders(target, kind);
		if (index >= values.length) {
			return {
				value: undefined,
				done: true
			};
		}

		this[INTERNAL].index = index + 1;

		return {
			value: values[index],
			done: false
		};
	}
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
| |
// Brand headers iterators for Object.prototype.toString.
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
	value: 'HeadersIterator',
	writable: false,
	enumerable: false,
	configurable: true
});
| |
| /** |
| * Export the Headers object in a form that Node.js can consume. |
| * |
| * @param Headers headers |
| * @return Object |
| */ |
| function exportNodeCompatibleHeaders(headers) { |
| const obj = Object.assign({ __proto__: null }, headers[MAP]); |
| |
| // http.request() only supports string as Host header. This hack makes |
| // specifying custom Host header possible. |
| const hostHeaderKey = find(headers[MAP], 'Host'); |
| if (hostHeaderKey !== undefined) { |
| obj[hostHeaderKey] = obj[hostHeaderKey][0]; |
| } |
| |
| return obj; |
| } |
| |
| /** |
| * Create a Headers object from an object of headers, ignoring those that do |
| * not conform to HTTP grammar productions. |
| * |
| * @param Object obj Object of headers |
| * @return Headers |
| */ |
| function createHeadersLenient(obj) { |
| const headers = new Headers(); |
| for (const name of Object.keys(obj)) { |
| if (invalidTokenRegex.test(name)) { |
| continue; |
| } |
| if (Array.isArray(obj[name])) { |
| for (const val of obj[name]) { |
| if (invalidHeaderCharRegex.test(val)) { |
| continue; |
| } |
| if (headers[MAP][name] === undefined) { |
| headers[MAP][name] = [val]; |
| } else { |
| headers[MAP][name].push(val); |
| } |
| } |
| } else if (!invalidHeaderCharRegex.test(obj[name])) { |
| headers[MAP][name] = [obj[name]]; |
| } |
| } |
| return headers; |
| } |
| |
// Symbol under which Response instances keep their private state.
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
| |
| /** |
| * Response class |
| * |
| * @param Stream body Readable stream |
| * @param Object opts Response options |
| * @return Void |
| */ |
| class Response { |
| constructor() { |
| let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; |
| let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; |
| |
| Body.call(this, body, opts); |
| |
| const status = opts.status || 200; |
| const headers = new Headers(opts.headers); |
| |
| if (body != null && !headers.has('Content-Type')) { |
| const contentType = extractContentType(body); |
| if (contentType) { |
| headers.append('Content-Type', contentType); |
| } |
| } |
| |
| this[INTERNALS$1] = { |
| url: opts.url, |
| status, |
| statusText: opts.statusText || STATUS_CODES[status], |
| headers, |
| counter: opts.counter |
| }; |
| } |
| |
| get url() { |
| return this[INTERNALS$1].url || ''; |
| } |
| |
| get status() { |
| return this[INTERNALS$1].status; |
| } |
| |
| /** |
| * Convenience property representing if the request ended normally |
| */ |
| get ok() { |
| return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; |
| } |
| |
| get redirected() { |
| return this[INTERNALS$1].counter > 0; |
| } |
| |
| get statusText() { |
| return this[INTERNALS$1].statusText; |
| } |
| |
| get headers() { |
| return this[INTERNALS$1].headers; |
| } |
| |
| /** |
| * Clone this response |
| * |
| * @return Response |
| */ |
| clone() { |
| return new Response(clone(this), { |
| url: this.url, |
| status: this.status, |
| statusText: this.statusText, |
| headers: this.headers, |
| ok: this.ok, |
| redirected: this.redirected |
| }); |
| } |
| } |
| |
// Copy Body's helper methods onto Response.prototype.
Body.mixIn(Response.prototype);

// Mark the public accessors as enumerable on the prototype.
Object.defineProperties(Response.prototype, {
	url: { enumerable: true },
	status: { enumerable: true },
	ok: { enumerable: true },
	redirected: { enumerable: true },
	statusText: { enumerable: true },
	headers: { enumerable: true },
	clone: { enumerable: true }
});

// Brand Response instances for Object.prototype.toString.
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
	value: 'Response',
	writable: false,
	enumerable: false,
	configurable: true
});
| |
// Symbol under which Request instances keep their private state.
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// Readable#destroy is needed to cancel streamed bodies (absent on node < 8).
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
| |
| /** |
| * Check if a value is an instance of Request. |
| * |
| * @param Mixed input |
| * @return Boolean |
| */ |
| function isRequest(input) { |
| return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; |
| } |
| |
/**
 * Check whether `signal` brands as an AbortSignal (via the name of its
 * prototype's constructor).
 *
 * @param Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
	const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
	// tolerate prototypes without a constructor (e.g. Object.create(null) chains)
	return !!(proto && proto.constructor && proto.constructor.name === 'AbortSignal');
}
| |
| /** |
| * Request class |
| * |
| * @param Mixed input Url or Request instance |
| * @param Object init Custom options |
| * @return Void |
| */ |
| class Request { |
| constructor(input) { |
| let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; |
| |
| let parsedURL; |
| |
| // normalize input |
| if (!isRequest(input)) { |
| if (input && input.href) { |
| // in order to support Node.js' Url objects; though WHATWG's URL objects |
| // will fall into this branch also (since their `toString()` will return |
| // `href` property anyway) |
| parsedURL = parse_url(input.href); |
| } else { |
| // coerce input to a string before attempting to parse |
| parsedURL = parse_url(`${input}`); |
| } |
| input = {}; |
| } else { |
| parsedURL = parse_url(input.url); |
| } |
| |
| let method = init.method || input.method || 'GET'; |
| method = method.toUpperCase(); |
| |
| if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { |
| throw new TypeError('Request with GET/HEAD method cannot have body'); |
| } |
| |
| let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; |
| |
| Body.call(this, inputBody, { |
| timeout: init.timeout || input.timeout || 0, |
| size: init.size || input.size || 0 |
| }); |
| |
| const headers = new Headers(init.headers || input.headers || {}); |
| |
| if (inputBody != null && !headers.has('Content-Type')) { |
| const contentType = extractContentType(inputBody); |
| if (contentType) { |
| headers.append('Content-Type', contentType); |
| } |
| } |
| |
| let signal = isRequest(input) ? input.signal : null; |
| if ('signal' in init) signal = init.signal; |
| |
| if (signal != null && !isAbortSignal(signal)) { |
| throw new TypeError('Expected signal to be an instanceof AbortSignal'); |
| } |
| |
| this[INTERNALS$2] = { |
| method, |
| redirect: init.redirect || input.redirect || 'follow', |
| headers, |
| parsedURL, |
| signal |
| }; |
| |
| // node-fetch-only options |
| this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; |
| this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; |
| this.counter = init.counter || input.counter || 0; |
| this.agent = init.agent || input.agent; |
| } |
| |
| get method() { |
| return this[INTERNALS$2].method; |
| } |
| |
| get url() { |
| return format_url(this[INTERNALS$2].parsedURL); |
| } |
| |
| get headers() { |
| return this[INTERNALS$2].headers; |
| } |
| |
| get redirect() { |
| return this[INTERNALS$2].redirect; |
| } |
| |
| get signal() { |
| return this[INTERNALS$2].signal; |
| } |
| |
| /** |
| * Clone this request |
| * |
| * @return Request |
| */ |
| clone() { |
| return new Request(this); |
| } |
| } |
| |
// Copy Body's helper methods onto Request.prototype.
Body.mixIn(Request.prototype);

// Brand Request instances for Object.prototype.toString.
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
	value: 'Request',
	writable: false,
	enumerable: false,
	configurable: true
});

// Mark the public accessors as enumerable on the prototype.
Object.defineProperties(Request.prototype, {
	method: { enumerable: true },
	url: { enumerable: true },
	headers: { enumerable: true },
	redirect: { enumerable: true },
	clone: { enumerable: true },
	signal: { enumerable: true }
});
| |
| /** |
| * Convert a Request to Node.js http request options. |
| * |
| * @param Request A Request instance |
| * @return Object The options object to be passed to http.request |
| */ |
| function getNodeRequestOptions(request) { |
| const parsedURL = request[INTERNALS$2].parsedURL; |
| const headers = new Headers(request[INTERNALS$2].headers); |
| |
| // fetch step 1.3 |
| if (!headers.has('Accept')) { |
| headers.set('Accept', '*/*'); |
| } |
| |
| // Basic fetch |
| if (!parsedURL.protocol || !parsedURL.hostname) { |
| throw new TypeError('Only absolute URLs are supported'); |
| } |
| |
| if (!/^https?:$/.test(parsedURL.protocol)) { |
| throw new TypeError('Only HTTP(S) protocols are supported'); |
| } |
| |
| if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { |
| throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); |
| } |
| |
| // HTTP-network-or-cache fetch steps 2.4-2.7 |
| let contentLengthValue = null; |
| if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { |
| contentLengthValue = '0'; |
| } |
| if (request.body != null) { |
| const totalBytes = getTotalBytes(request); |
| if (typeof totalBytes === 'number') { |
| contentLengthValue = String(totalBytes); |
| } |
| } |
| if (contentLengthValue) { |
| headers.set('Content-Length', contentLengthValue); |
| } |
| |
| // HTTP-network-or-cache fetch step 2.11 |
| if (!headers.has('User-Agent')) { |
| headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); |
| } |
| |
| // HTTP-network-or-cache fetch step 2.15 |
| if (request.compress && !headers.has('Accept-Encoding')) { |
| headers.set('Accept-Encoding', 'gzip,deflate'); |
| } |
| |
| let agent = request.agent; |
| if (typeof agent === 'function') { |
| agent = agent(parsedURL); |
| } |
| |
| if (!headers.has('Connection') && !agent) { |
| headers.set('Connection', 'close'); |
| } |
| |
| // HTTP-network fetch step 4.2 |
| // chunked encoding is handled by Node.js |
| |
| return Object.assign({}, parsedURL, { |
| method: request.method, |
| headers: exportNodeCompatibleHeaders(headers), |
| agent |
| }); |
| } |
| |
| /** |
| * abort-error.js |
| * |
| * AbortError interface for cancelled requests |
| */ |
| |
| /** |
| * Create AbortError instance |
| * |
| * @param String message Error message for human |
| * @return AbortError |
| */ |
| function AbortError(message) { |
| Error.call(this, message); |
| |
| this.type = 'aborted'; |
| this.message = message; |
| |
| // hide custom error implementation details from end-users |
| Error.captureStackTrace(this, this.constructor); |
| } |
| |
| AbortError.prototype = Object.create(Error.prototype); |
| AbortError.prototype.constructor = AbortError; |
| AbortError.prototype.name = 'AbortError'; |
| |
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
// (local aliases used for response teeing and redirect URL resolution)
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
| |
| /** |
| * Fetch function |
| * |
| * @param Mixed url Absolute url or Request instance |
| * @param Object opts Fetch options |
| * @return Promise |
| */ |
| function fetch(url, opts) { |
| |
| // allow custom promise |
| if (!fetch.Promise) { |
| throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); |
| } |
| |
| Body.Promise = fetch.Promise; |
| |
| // wrap http.request into fetch |
| return new fetch.Promise(function (resolve, reject) { |
| // build request object |
| const request = new Request(url, opts); |
| const options = getNodeRequestOptions(request); |
| |
| const send = (options.protocol === 'https:' ? https : http).request; |
| const signal = request.signal; |
| |
| let response = null; |
| |
| const abort = function abort() { |
| let error = new AbortError('The user aborted a request.'); |
| reject(error); |
| if (request.body && request.body instanceof Stream.Readable) { |
| request.body.destroy(error); |
| } |
| if (!response || !response.body) return; |
| response.body.emit('error', error); |
| }; |
| |
| if (signal && signal.aborted) { |
| abort(); |
| return; |
| } |
| |
| const abortAndFinalize = function abortAndFinalize() { |
| abort(); |
| finalize(); |
| }; |
| |
| // send request |
| const req = send(options); |
| let reqTimeout; |
| |
| if (signal) { |
| signal.addEventListener('abort', abortAndFinalize); |
| } |
| |
| function finalize() { |
| req.abort(); |
| if (signal) signal.removeEventListener('abort', abortAndFinalize); |
| clearTimeout(reqTimeout); |
| } |
| |
| if (request.timeout) { |
| req.once('socket', function (socket) { |
| reqTimeout = setTimeout(function () { |
| reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); |
| finalize(); |
| }, request.timeout); |
| }); |
| } |
| |
| req.on('error', function (err) { |
| reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); |
| finalize(); |
| }); |
| |
| req.on('response', function (res) { |
| clearTimeout(reqTimeout); |
| |
| const headers = createHeadersLenient(res.headers); |
| |
| // HTTP fetch step 5 |
| if (fetch.isRedirect(res.statusCode)) { |
| // HTTP fetch step 5.2 |
| const location = headers.get('Location'); |
| |
| // HTTP fetch step 5.3 |
| const locationURL = location === null ? null : resolve_url(request.url, location); |
| |
| // HTTP fetch step 5.5 |
| switch (request.redirect) { |
| case 'error': |
| reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); |
| finalize(); |
| return; |
| case 'manual': |
| // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. |
| if (locationURL !== null) { |
| // handle corrupted header |
| try { |
| headers.set('Location', locationURL); |
| } catch (err) { |
| // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request |
| reject(err); |
| } |
| } |
| break; |
| case 'follow': |
| // HTTP-redirect fetch step 2 |
| if (locationURL === null) { |
| break; |
| } |
| |
| // HTTP-redirect fetch step 5 |
| if (request.counter >= request.follow) { |
| reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); |
| finalize(); |
| return; |
| } |
| |
| // HTTP-redirect fetch step 6 (counter increment) |
| // Create a new Request object. |
| const requestOpts = { |
| headers: new Headers(request.headers), |
| follow: request.follow, |
| counter: request.counter + 1, |
| agent: request.agent, |
| compress: request.compress, |
| method: request.method, |
| body: request.body, |
| signal: request.signal, |
| timeout: request.timeout |
| }; |
| |
| // HTTP-redirect fetch step 9 |
| if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { |
| reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); |
| finalize(); |
| return; |
| } |
| |
| // HTTP-redirect fetch step 11 |
| if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { |
| requestOpts.method = 'GET'; |
| requestOpts.body = undefined; |
| requestOpts.headers.delete('content-length'); |
| } |
| |
| // HTTP-redirect fetch step 15 |
| resolve(fetch(new Request(locationURL, requestOpts))); |
| finalize(); |
| return; |
| } |
| } |
| |
| // prepare response |
| res.once('end', function () { |
| if (signal) signal.removeEventListener('abort', abortAndFinalize); |
| }); |
| let body = res.pipe(new PassThrough$1()); |
| |
| const response_options = { |
| url: request.url, |
| status: res.statusCode, |
| statusText: res.statusMessage, |
| headers: headers, |
| size: request.size, |
| timeout: request.timeout, |
| counter: request.counter |
| }; |
| |
| // HTTP-network fetch step 12.1.1.3 |
| const codings = headers.get('Content-Encoding'); |
| |
| // HTTP-network fetch step 12.1.1.4: handle content codings |
| |
| // in following scenarios we ignore compression support |
| // 1. compression support is disabled |
| // 2. HEAD request |
| // 3. no Content-Encoding header |
| // 4. no content response (204) |
| // 5. content not modified response (304) |
| if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { |
| response = new Response(body, response_options); |
| resolve(response); |
| return; |
| } |
| |
| // For Node v6+ |
| // Be less strict when decoding compressed responses, since sometimes |
| // servers send slightly invalid responses that are still accepted |
| // by common browsers. |
| // Always using Z_SYNC_FLUSH is what cURL does. |
| const zlibOptions = { |
| flush: zlib.Z_SYNC_FLUSH, |
| finishFlush: zlib.Z_SYNC_FLUSH |
| }; |
| |
| // for gzip |
| if (codings == 'gzip' || codings == 'x-gzip') { |
| body = body.pipe(zlib.createGunzip(zlibOptions)); |
| response = new Response(body, response_options); |
| resolve(response); |
| return; |
| } |
| |
| // for deflate |
| if (codings == 'deflate' || codings == 'x-deflate') { |
| // handle the infamous raw deflate response from old servers |
| // a hack for old IIS and Apache servers |
| const raw = res.pipe(new PassThrough$1()); |
| raw.once('data', function (chunk) { |
| // see http://stackoverflow.com/questions/37519828 |
| if ((chunk[0] & 0x0F) === 0x08) { |
| body = body.pipe(zlib.createInflate()); |
| } else { |
| body = body.pipe(zlib.createInflateRaw()); |
| } |
| response = new Response(body, response_options); |
| resolve(response); |
| }); |
| return; |
| } |
| |
| // for br |
| if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { |
| body = body.pipe(zlib.createBrotliDecompress()); |
| response = new Response(body, response_options); |
| resolve(response); |
| return; |
| } |
| |
| // otherwise, use response as-is |
| response = new Response(body, response_options); |
| resolve(response); |
| }); |
| |
| writeToStream(req, request); |
| }); |
| } |
| /** |
| * Redirect code matching |
| * |
| * @param Number code Status code |
| * @return Boolean |
| */ |
fetch.isRedirect = function (code) {
	// The five HTTP status codes that the fetch spec treats as redirects.
	return [301, 302, 303, 307, 308].includes(code);
};
| |
// expose Promise
fetch.Promise = global.Promise;

// CommonJS/ES-module interop: the fetch function itself is the module export,
// with the named classes attached for consumers that destructure them.
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
| |
| |
| /***/ }), |
| |
| /***/ 462: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;

/**
 * Escapes a string for use as a cmd.exe command name by prefixing every
 * shell metacharacter with the `^` escape character.
 */
function escapeCommand(arg) {
    return arg.replace(metaCharsRegExp, '^$1');
}
| |
/**
 * Escapes a value for safe use as a single cmd.exe argument: quotes it,
 * escapes embedded quotes/backslashes, then caret-escapes shell metachars.
 * Algorithm based on https://qntm.org/cmd
 */
function escapeArgument(arg, doubleEscapeMetaChars) {
    // Coerce anything (numbers, etc.) to a string first.
    let escaped = `${arg}`;

    // Backslashes directly before a double quote must be doubled,
    // and the quote itself escaped.
    escaped = escaped.replace(/(\\*)"/g, '$1$1\\"');

    // Backslashes at the very end must be doubled, because they will
    // end up directly before the closing quote added below.
    escaped = escaped.replace(/(\\*)$/, '$1$1');

    // All other backslashes occur literally.

    // Wrap the whole argument in double quotes.
    escaped = `"${escaped}"`;

    // Caret-escape cmd.exe metacharacters; a second pass is applied when the
    // caller needs the argument to survive two shell parsing passes.
    escaped = escaped.replace(metaCharsRegExp, '^$1');
    if (doubleEscapeMetaChars) {
        escaped = escaped.replace(metaCharsRegExp, '^$1');
    }

    return escaped;
}
| |
// Escaping rules differ for the command name vs. a quoted argument.
module.exports.command = escapeCommand;
module.exports.argument = escapeArgument;
| |
| |
| /***/ }), |
| |
| /***/ 463: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } |
| |
| var deprecation = __webpack_require__(692); |
| var once = _interopDefault(__webpack_require__(969)); |
| |
| const logOnce = once(deprecation => console.warn(deprecation)); |
| /** |
| * Error with extra properties to help with debugging |
| */ |
| |
| class RequestError extends Error { |
| constructor(message, statusCode, options) { |
| super(message); // Maintains proper stack trace (only available on V8) |
| |
| /* istanbul ignore next */ |
| |
| if (Error.captureStackTrace) { |
| Error.captureStackTrace(this, this.constructor); |
| } |
| |
| this.name = "HttpError"; |
| this.status = statusCode; |
| Object.defineProperty(this, "code", { |
| get() { |
| logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); |
| return statusCode; |
| } |
| |
| }); |
| this.headers = options.headers || {}; // redact request credentials without mutating original request options |
| |
| const requestCopy = Object.assign({}, options.request); |
| |
| if (options.request.headers.authorization) { |
| requestCopy.headers = Object.assign({}, options.request.headers, { |
| authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") |
| }); |
| } |
| |
| requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit |
| // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications |
| .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended |
| // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header |
| .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); |
| this.request = requestCopy; |
| } |
| |
| } |
| |
| exports.RequestError = RequestError; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 469: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// Transpiler-generated helper: re-exports property `k` of module object `m`
// onto `o` (as a live getter where Object.create is available).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Transpiler-generated helper: sets the `default` property on a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Transpiler-generated helper: wraps a CommonJS export object so it can be
// consumed like an ES module namespace (copies own props, sets `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokit = exports.context = void 0;
const Context = __importStar(__webpack_require__(262));
const utils_1 = __webpack_require__(521);
// Context for the workflow run this action executes in.
exports.context = new Context.Context();
| /** |
| * Returns a hydrated octokit ready to use for GitHub Actions |
| * |
| * @param token the repo PAT or GITHUB_TOKEN |
| * @param options other options to set |
| */ |
function getOctokit(token, options) {
    // Resolve constructor options (token auth merged into the caller's
    // options) and instantiate the Actions-flavored Octokit class.
    const octokitOptions = utils_1.getOctokitOptions(token, options);
    return new utils_1.GitHub(octokitOptions);
}
exports.getOctokit = getOctokit;
| //# sourceMappingURL=github.js.map |
| |
| /***/ }), |
| |
| /***/ 470: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// Transpiler-generated helper: drives a generator produced from an async
// function, resolving/rejecting the returned promise as the generator steps.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Transpiler-generated helper: adapts a CommonJS export object so it can be
// used like an ES module namespace (copies own props, sets `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const command_1 = __webpack_require__(431); |
| const os = __importStar(__webpack_require__(87)); |
| const path = __importStar(__webpack_require__(622)); |
| /** |
| * The code to exit an action |
| */ |
| var ExitCode; |
| (function (ExitCode) { |
| /** |
| * A code indicating that the action was successful |
| */ |
| ExitCode[ExitCode["Success"] = 0] = "Success"; |
| /** |
| * A code indicating that the action was a failure |
| */ |
| ExitCode[ExitCode["Failure"] = 1] = "Failure"; |
| })(ExitCode = exports.ExitCode || (exports.ExitCode = {})); |
| //----------------------------------------------------------------------- |
| // Variables |
| //----------------------------------------------------------------------- |
| /** |
| * Sets env variable for this action and future actions in the job |
| * @param name the name of the variable to set |
| * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify |
| */ |
| // eslint-disable-next-line @typescript-eslint/no-explicit-any |
| function exportVariable(name, val) { |
| const convertedVal = command_1.toCommandValue(val); |
| process.env[name] = convertedVal; |
| command_1.issueCommand('set-env', { name }, convertedVal); |
| } |
| exports.exportVariable = exportVariable; |
| /** |
| * Registers a secret which will get masked from logs |
| * @param secret value of the secret |
| */ |
| function setSecret(secret) { |
| command_1.issueCommand('add-mask', {}, secret); |
| } |
| exports.setSecret = setSecret; |
| /** |
| * Prepends inputPath to the PATH (for this action and future actions) |
| * @param inputPath |
| */ |
| function addPath(inputPath) { |
| command_1.issueCommand('add-path', {}, inputPath); |
| process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; |
| } |
| exports.addPath = addPath; |
| /** |
| * Gets the value of an input. The value is also trimmed. |
| * |
| * @param name name of the input to get |
| * @param options optional. See InputOptions. |
| * @returns string |
| */ |
function getInput(name, options) {
    // The runner exposes inputs as INPUT_<NAME> environment variables,
    // with spaces in the input name replaced by underscores.
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    // Trim surrounding whitespace by default; callers may opt out via
    // options.trimWhitespace === false (backward-compatible extension,
    // matching later @actions/core InputOptions).
    if (options && options.trimWhitespace === false) {
        return val;
    }
    return val.trim();
}
exports.getInput = getInput;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    // Mark the process as failed without exiting immediately, so any
    // remaining cleanup in the action still runs.
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
| //----------------------------------------------------------------------- |
| // Logging Commands |
| //----------------------------------------------------------------------- |
| /** |
| * Gets whether Actions Step Debug is on or not |
| */ |
function isDebug() {
    // The runner sets RUNNER_DEBUG=1 when step debug logging is enabled.
    return process.env.RUNNER_DEBUG === '1';
}
exports.isDebug = isDebug;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 */
function error(message) {
    command_1.issue('error', message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds an warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 */
function warning(message) {
    command_1.issue('warning', message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    // Plain stdout write; not a workflow command, so no command escaping applies.
    process.stdout.write(message + os.EOL);
}
exports.info = info;
| /** |
| * Begin an output group. |
| * |
| * Output until the next `groupEnd` will be foldable in this group |
| * |
| * @param name The name of the output group |
| */ |
| function startGroup(name) { |
| command_1.issue('group', name); |
| } |
| exports.startGroup = startGroup; |
| /** |
| * End an output group. |
| */ |
| function endGroup() { |
| command_1.issue('endgroup'); |
| } |
| exports.endGroup = endGroup; |
| /** |
| * Wrap an asynchronous function call in a group. |
| * |
| * Returns the same type as the function itself. |
| * |
| * @param name The name of the group |
| * @param fn The function to wrap in the group |
| */ |
| function group(name, fn) { |
| return __awaiter(this, void 0, void 0, function* () { |
| startGroup(name); |
| let result; |
| try { |
| result = yield fn(); |
| } |
| finally { |
| endGroup(); |
| } |
| return result; |
| }); |
| } |
| exports.group = group; |
| //----------------------------------------------------------------------- |
| // Wrapper action state |
| //----------------------------------------------------------------------- |
| /** |
| * Saves state for current action, the state can only be retrieved by this action's post job execution. |
| * |
| * @param name name of the state to store |
| * @param value value to store. Non-string values will be converted to a string via JSON.stringify |
| */ |
| // eslint-disable-next-line @typescript-eslint/no-explicit-any |
| function saveState(name, value) { |
| command_1.issueCommand('save-state', { name }, value); |
| } |
| exports.saveState = saveState; |
| /** |
| * Gets the value of an state set by this action's main execution. |
| * |
| * @param name name of the state to get |
| * @returns string |
| */ |
function getState(name) {
    // State saved via saveState is surfaced as STATE_<name> env variables.
    const stored = process.env[`STATE_${name}`];
    return stored || '';
}
| exports.getState = getState; |
| //# sourceMappingURL=core.js.map |
| |
| /***/ }), |
| |
| /***/ 489: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const path = __webpack_require__(622); |
| const which = __webpack_require__(814); |
| const pathKey = __webpack_require__(39)(); |
| |
// Resolves `parsed.command` to an executable path with `which`, honoring the
// caller's env/cwd; returns undefined when the command cannot be found.
// `withoutPathExt` passes the PATH delimiter as the PATHEXT list — presumably
// to neutralize extension matching on the retry (see resolveCommand) — TODO confirm.
function resolveCommandAttempt(parsed, withoutPathExt) {
    const cwd = process.cwd();
    const hasCustomCwd = parsed.options.cwd != null;

    // If a custom `cwd` was specified, we need to change the process cwd
    // because `which` will do stat calls but does not support a custom cwd
    if (hasCustomCwd) {
        try {
            // Best-effort: an invalid cwd is deliberately ignored here.
            process.chdir(parsed.options.cwd);
        } catch (err) {
            /* Empty */
        }
    }

    let resolved;

    try {
        resolved = which.sync(parsed.command, {
            path: (parsed.options.env || process.env)[pathKey],
            pathExt: withoutPathExt ? path.delimiter : undefined,
        });
    } catch (e) {
        /* Empty */
    } finally {
        // Always restore the original cwd, even when `which` throws.
        process.chdir(cwd);
    }

    // If we successfully resolved, ensure that an absolute path is returned
    // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
    if (resolved) {
        resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
    }

    return resolved;
}
| |
// Resolve the command: first attempt uses normal lookup; the fallback
// retries with extension matching disabled.
function resolveCommand(parsed) {
    const firstAttempt = resolveCommandAttempt(parsed);
    if (firstAttempt) {
        return firstAttempt;
    }
    return resolveCommandAttempt(parsed, true);
}

module.exports = resolveCommand;
| |
| |
| /***/ }), |
| |
| /***/ 504: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
Object.defineProperty(exports, "__esModule", { value: true });
exports.Logger = exports.LoggingLevel = void 0;
// Transpiled TypeScript enum: DEBUG (0) lets debug messages through,
// SILENT (1) suppresses them (see Logger.shouldLog).
var LoggingLevel;
(function (LoggingLevel) {
    LoggingLevel[LoggingLevel["DEBUG"] = 0] = "DEBUG";
    LoggingLevel[LoggingLevel["SILENT"] = 1] = "SILENT";
})(LoggingLevel = exports.LoggingLevel || (exports.LoggingLevel = {}));
/**
 * Minimal console logger. A message is emitted only when its level is at
 * or above the logger's configured level.
 */
class Logger {
    constructor(level = 0) {
        this.level = level;
    }
    /* eslint-disable @typescript-eslint/no-explicit-any */
    log(message, ...rest) {
        console.log(message, ...rest);
    }
    shouldLog(level) {
        return this.level <= level;
    }
    setLevel(newLevel) {
        this.level = newLevel;
    }
    /* eslint-disable @typescript-eslint/no-explicit-any */
    /* eslint-disable @typescript-eslint/explicit-module-boundary-types */
    debug(message, ...rest) {
        if (!this.shouldLog(LoggingLevel.DEBUG)) {
            return;
        }
        this.log(message, ...rest);
    }
}
| exports.Logger = Logger; |
| |
| |
| /***/ }), |
| |
| /***/ 510: |
| /***/ (function(module) { |
| |
| module.exports = addHook |
| |
/**
 * Registers `hook` under `name` in the state registry, wrapping it according
 * to `kind`:
 *   - 'before': hook runs first, then the wrapped method.
 *   - 'after':  method runs first, hook sees its result, result is returned.
 *   - 'error':  hook runs only when the method rejects/throws.
 * Any other kind (e.g. 'wrap') is stored unwrapped. The original hook is
 * kept alongside the wrapper so it can be removed later.
 */
function addHook(state, kind, name, hook) {
    const orig = hook;
    if (!state.registry[name]) {
        state.registry[name] = [];
    }

    if (kind === 'before') {
        hook = (method, options) => {
            return Promise.resolve()
                .then(orig.bind(null, options))
                .then(method.bind(null, options));
        };
    } else if (kind === 'after') {
        hook = (method, options) => {
            let result;
            return Promise.resolve()
                .then(method.bind(null, options))
                .then((methodResult) => {
                    result = methodResult;
                    return orig(result, options);
                })
                .then(() => result);
        };
    } else if (kind === 'error') {
        hook = (method, options) => {
            return Promise.resolve()
                .then(method.bind(null, options))
                .catch((error) => orig(error, options));
        };
    }

    state.registry[name].push({ hook, orig });
}
| |
| |
| /***/ }), |
| |
| /***/ 521: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
// Transpiler-generated helper: re-exports property `k` of module object `m`
// onto `o` (as a live getter where Object.create is available).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Transpiler-generated helper: sets the `default` property on a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Transpiler-generated helper: wraps a CommonJS export object so it can be
// consumed like an ES module namespace (copies own props, sets `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokitOptions = exports.GitHub = exports.context = void 0;
const Context = __importStar(__webpack_require__(262));
const Utils = __importStar(__webpack_require__(127));
// octokit + plugins
const core_1 = __webpack_require__(448);
const plugin_rest_endpoint_methods_1 = __webpack_require__(842);
const plugin_paginate_rest_1 = __webpack_require__(299);
exports.context = new Context.Context();
const baseUrl = Utils.getApiBaseUrl();
// Shared request defaults: API base URL plus a proxy agent when one applies.
const defaults = {
    baseUrl,
    request: {
        agent: Utils.getProxyAgent(baseUrl)
    }
};
// Octokit subclass preloaded with the REST endpoint and pagination plugins.
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);
| /** |
| * Convience function to correctly format Octokit Options to pass into the constructor. |
| * |
| * @param token the repo PAT or GITHUB_TOKEN |
| * @param options other options to set |
| */ |
function getOctokitOptions(token, options) {
    // Spread-clone so the caller's options object is never mutated
    // ({ ...undefined } yields an empty object, like Object.assign).
    const opts = { ...options };
    // Resolve the auth strategy for the supplied token, if any.
    const auth = Utils.getAuthString(token, opts);
    if (auth) {
        opts.auth = auth;
    }
    return opts;
}
exports.getOctokitOptions = getOctokitOptions;
| //# sourceMappingURL=utils.js.map |
| |
| /***/ }), |
| |
| /***/ 523: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var register = __webpack_require__(280) |
| var addHook = __webpack_require__(510) |
| var removeHook = __webpack_require__(763) |
| |
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function.bind
var bindable = bind.bind(bind)

// Attaches the public API onto a hook function: `remove` (also at api.remove)
// plus one registrar per kind (before/error/after/wrap), each pre-bound to
// this registry's state. `name` is only supplied for singular hooks.
function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}
| |
// A singular hook is a collection with exactly one fixed, internal hook name.
function HookSingular () {
  const hookName = 'h'
  const hookState = {
    registry: {}
  }
  const singularHook = register.bind(null, hookState, hookName)
  bindApi(singularHook, hookState, hookName)
  return singularHook
}
| |
// A hook collection keeps its own registry of named hooks.
function HookCollection () {
  const collectionState = {
    registry: {}
  }
  const collectionHook = register.bind(null, collectionState)
  bindApi(collectionHook, collectionState)
  return collectionHook
}
| |
// Module-level flag so the deprecation warning below prints at most once.
var collectionHookDeprecationMessageDisplayed = false
// Deprecated entry point: behaves exactly like Hook.Collection().
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}
| |
// `.bind()` with no arguments exposes copies of the constructors.
Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
| |
| |
| /***/ }), |
| |
| /***/ 539: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const url = __webpack_require__(835); |
| const http = __webpack_require__(605); |
| const https = __webpack_require__(211); |
| const pm = __webpack_require__(950); |
// Lazily-loaded tunneling module — NOTE(review): its use is outside this
// excerpt (inside HttpClient); confirm it is required only when a proxy applies.
let tunnel;
// Transpiled TypeScript enum of HTTP status codes used throughout the client.
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// Well-known header names used by the JSON helper methods.
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
| /** |
| * Returns the proxy URL, depending upon the supplied url and proxy environment variables. |
| * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com |
| */ |
function getProxyUrl(serverUrl) {
    // Delegate proxy resolution to the proxy helper module; an empty string
    // signals that no proxy applies to this server URL.
    const proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
    if (proxyUrl) {
        return proxyUrl.href;
    }
    return '';
}
exports.getProxyUrl = getProxyUrl;
// Status codes that should be followed via the Location header.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Transient server errors considered worth retrying.
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only idempotent verbs are retried (see the retry logic in HttpClient.request).
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Exponential backoff tuning constants — NOTE(review): consumed by retry
// logic outside this excerpt; confirm units there.
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientResponse {
    /**
     * Wraps a low-level HTTP response stream (e.g. http.IncomingMessage).
     * @param message the response stream to read from
     */
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers the entire response stream and resolves it as a string.
     * Rejects if the stream emits 'error'. (The previous version used an
     * async Promise executor and never listened for 'error', so a failed
     * stream left the returned promise pending forever.)
     */
    readBody() {
        return new Promise((resolve, reject) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
            // Propagate stream failures instead of hanging the promise.
            this.message.on('error', reject);
        });
    }
}
| exports.HttpClientResponse = HttpClientResponse; |
function isHttps(requestUrl) {
    // `protocol` from url.parse includes the trailing colon, e.g. 'https:'.
    const { protocol } = url.parse(requestUrl);
    return protocol === 'https:';
}
| exports.isHttps = isHttps; |
| class HttpClient { |
| constructor(userAgent, handlers, requestOptions) { |
| this._ignoreSslError = false; |
| this._allowRedirects = true; |
| this._allowRedirectDowngrade = false; |
| this._maxRedirects = 50; |
| this._allowRetries = false; |
| this._maxRetries = 1; |
| this._keepAlive = false; |
| this._disposed = false; |
| this.userAgent = userAgent; |
| this.handlers = handlers || []; |
| this.requestOptions = requestOptions; |
| if (requestOptions) { |
| if (requestOptions.ignoreSslError != null) { |
| this._ignoreSslError = requestOptions.ignoreSslError; |
| } |
| this._socketTimeout = requestOptions.socketTimeout; |
| if (requestOptions.allowRedirects != null) { |
| this._allowRedirects = requestOptions.allowRedirects; |
| } |
| if (requestOptions.allowRedirectDowngrade != null) { |
| this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; |
| } |
| if (requestOptions.maxRedirects != null) { |
| this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); |
| } |
| if (requestOptions.keepAlive != null) { |
| this._keepAlive = requestOptions.keepAlive; |
| } |
| if (requestOptions.allowRetries != null) { |
| this._allowRetries = requestOptions.allowRetries; |
| } |
| if (requestOptions.maxRetries != null) { |
| this._maxRetries = requestOptions.maxRetries; |
| } |
| } |
| } |
| options(requestUrl, additionalHeaders) { |
| return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); |
| } |
| get(requestUrl, additionalHeaders) { |
| return this.request('GET', requestUrl, null, additionalHeaders || {}); |
| } |
| del(requestUrl, additionalHeaders) { |
| return this.request('DELETE', requestUrl, null, additionalHeaders || {}); |
| } |
| post(requestUrl, data, additionalHeaders) { |
| return this.request('POST', requestUrl, data, additionalHeaders || {}); |
| } |
| patch(requestUrl, data, additionalHeaders) { |
| return this.request('PATCH', requestUrl, data, additionalHeaders || {}); |
| } |
| put(requestUrl, data, additionalHeaders) { |
| return this.request('PUT', requestUrl, data, additionalHeaders || {}); |
| } |
| head(requestUrl, additionalHeaders) { |
| return this.request('HEAD', requestUrl, null, additionalHeaders || {}); |
| } |
| sendStream(verb, requestUrl, stream, additionalHeaders) { |
| return this.request(verb, requestUrl, stream, additionalHeaders); |
| } |
| /** |
| * Gets a typed object from an endpoint |
| * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise |
| */ |
| async getJson(requestUrl, additionalHeaders = {}) { |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| let res = await this.get(requestUrl, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| async postJson(requestUrl, obj, additionalHeaders = {}) { |
| let data = JSON.stringify(obj, null, 2); |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); |
| let res = await this.post(requestUrl, data, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| async putJson(requestUrl, obj, additionalHeaders = {}) { |
| let data = JSON.stringify(obj, null, 2); |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); |
| let res = await this.put(requestUrl, data, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| async patchJson(requestUrl, obj, additionalHeaders = {}) { |
| let data = JSON.stringify(obj, null, 2); |
| additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); |
| additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); |
| let res = await this.patch(requestUrl, data, additionalHeaders); |
| return this._processResponse(res, this.requestOptions); |
| } |
| /** |
| * Makes a raw http request. |
| * All other methods such as get, post, patch, and request ultimately call this. |
| * Prefer get, del, post and patch |
| */ |
| async request(verb, requestUrl, data, headers) { |
| if (this._disposed) { |
| throw new Error('Client has already been disposed.'); |
| } |
| let parsedUrl = url.parse(requestUrl); |
| let info = this._prepareRequest(verb, parsedUrl, headers); |
| // Only perform retries on reads since writes may not be idempotent. |
| let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 |
| ? this._maxRetries + 1 |
| : 1; |
| let numTries = 0; |
| let response; |
| while (numTries < maxTries) { |
| response = await this.requestRaw(info, data); |
| // Check if it's an authentication challenge |
| if (response && |
| response.message && |
| response.message.statusCode === HttpCodes.Unauthorized) { |
| let authenticationHandler; |
| for (let i = 0; i < this.handlers.length; i++) { |
| if (this.handlers[i].canHandleAuthentication(response)) { |
| authenticationHandler = this.handlers[i]; |
| break; |
| } |
| } |
| if (authenticationHandler) { |
| return authenticationHandler.handleAuthentication(this, info, data); |
| } |
| else { |
| // We have received an unauthorized response but have no handlers to handle it. |
| // Let the response return to the caller. |
| return response; |
| } |
| } |
| let redirectsRemaining = this._maxRedirects; |
| while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && |
| this._allowRedirects && |
| redirectsRemaining > 0) { |
| const redirectUrl = response.message.headers['location']; |
| if (!redirectUrl) { |
| // if there's no location to redirect to, we won't |
| break; |
| } |
| let parsedRedirectUrl = url.parse(redirectUrl); |
| if (parsedUrl.protocol == 'https:' && |
| parsedUrl.protocol != parsedRedirectUrl.protocol && |
| !this._allowRedirectDowngrade) { |
| throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); |
| } |
| // we need to finish reading the response before reassigning response |
| // which will leak the open socket. |
| await response.readBody(); |
| // strip authorization header if redirected to a different hostname |
| if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { |
| for (let header in headers) { |
| // header names are case insensitive |
| if (header.toLowerCase() === 'authorization') { |
| delete headers[header]; |
| } |
| } |
| } |
| // let's make the request with the new redirectUrl |
| info = this._prepareRequest(verb, parsedRedirectUrl, headers); |
| response = await this.requestRaw(info, data); |
| redirectsRemaining--; |
| } |
| if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { |
| // If not a retry code, return immediately instead of retrying |
| return response; |
| } |
| numTries += 1; |
| if (numTries < maxTries) { |
| await response.readBody(); |
| await this._performExponentialBackoff(numTries); |
| } |
| } |
| return response; |
| } |
| /** |
| * Needs to be called if keepAlive is set to true in request options. |
| */ |
| dispose() { |
| if (this._agent) { |
| this._agent.destroy(); |
| } |
| this._disposed = true; |
| } |
| /** |
| * Raw request. |
| * @param info |
| * @param data |
| */ |
| requestRaw(info, data) { |
| return new Promise((resolve, reject) => { |
| let callbackForResult = function (err, res) { |
| if (err) { |
| reject(err); |
| } |
| resolve(res); |
| }; |
| this.requestRawWithCallback(info, data, callbackForResult); |
| }); |
| } |
| /** |
| * Raw request with callback. |
| * @param info |
| * @param data |
| * @param onResult |
| */ |
| requestRawWithCallback(info, data, onResult) { |
| let socket; |
| if (typeof data === 'string') { |
| info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); |
| } |
| let callbackCalled = false; |
| let handleResult = (err, res) => { |
| if (!callbackCalled) { |
| callbackCalled = true; |
| onResult(err, res); |
| } |
| }; |
| let req = info.httpModule.request(info.options, (msg) => { |
| let res = new HttpClientResponse(msg); |
| handleResult(null, res); |
| }); |
| req.on('socket', sock => { |
| socket = sock; |
| }); |
| // If we ever get disconnected, we want the socket to timeout eventually |
| req.setTimeout(this._socketTimeout || 3 * 60000, () => { |
| if (socket) { |
| socket.end(); |
| } |
| handleResult(new Error('Request timeout: ' + info.options.path), null); |
| }); |
| req.on('error', function (err) { |
| // err has statusCode property |
| // res should have headers |
| handleResult(err, null); |
| }); |
| if (data && typeof data === 'string') { |
| req.write(data, 'utf8'); |
| } |
| if (data && typeof data !== 'string') { |
| data.on('close', function () { |
| req.end(); |
| }); |
| data.pipe(req); |
| } |
| else { |
| req.end(); |
| } |
| } |
| /** |
| * Gets an http agent. This function is useful when you need an http agent that handles |
| * routing through a proxy server - depending upon the url and proxy environment variables. |
| * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com |
| */ |
| getAgent(serverUrl) { |
| let parsedUrl = url.parse(serverUrl); |
| return this._getAgent(parsedUrl); |
| } |
| _prepareRequest(method, requestUrl, headers) { |
| const info = {}; |
| info.parsedUrl = requestUrl; |
| const usingSsl = info.parsedUrl.protocol === 'https:'; |
| info.httpModule = usingSsl ? https : http; |
| const defaultPort = usingSsl ? 443 : 80; |
| info.options = {}; |
| info.options.host = info.parsedUrl.hostname; |
| info.options.port = info.parsedUrl.port |
| ? parseInt(info.parsedUrl.port) |
| : defaultPort; |
| info.options.path = |
| (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); |
| info.options.method = method; |
| info.options.headers = this._mergeHeaders(headers); |
| if (this.userAgent != null) { |
| info.options.headers['user-agent'] = this.userAgent; |
| } |
| info.options.agent = this._getAgent(info.parsedUrl); |
| // gives handlers an opportunity to participate |
| if (this.handlers) { |
| this.handlers.forEach(handler => { |
| handler.prepareRequest(info.options); |
| }); |
| } |
| return info; |
| } |
| _mergeHeaders(headers) { |
| const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); |
| if (this.requestOptions && this.requestOptions.headers) { |
| return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); |
| } |
| return lowercaseKeys(headers || {}); |
| } |
| _getExistingOrDefaultHeader(additionalHeaders, header, _default) { |
| const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); |
| let clientHeader; |
| if (this.requestOptions && this.requestOptions.headers) { |
| clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; |
| } |
| return additionalHeaders[header] || clientHeader || _default; |
| } |
    /**
     * Returns the http(s).Agent for a destination URL, creating and caching
     * proxy tunnel agents (this._proxyAgent) or keep-alive agents
     * (this._agent) as needed; otherwise falls back to the global agent.
     * @param parsedUrl destination URL (already url.parse'd)
     */
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            // lazy-require the tunnel module only when a proxy is configured
            if (!tunnel) {
                tunnel = __webpack_require__(413);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            // pick the tunnel variant matching destination and proxy protocols
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
| _performExponentialBackoff(retryNumber) { |
| retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); |
| const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); |
| return new Promise(resolve => setTimeout(() => resolve(), ms)); |
| } |
| static dateTimeDeserializer(key, value) { |
| if (typeof value === 'string') { |
| let a = new Date(value); |
| if (!isNaN(a.valueOf())) { |
| return a; |
| } |
| } |
| return value; |
| } |
    /**
     * Reads the response body, JSON-parses it, and resolves with
     * { statusCode, result, headers } — or rejects with an Error carrying
     * statusCode/result — based on the HTTP status.
     * NOTE(review): this uses an async executor inside `new Promise`, and the
     * NotFound branch resolves WITHOUT returning, so execution continues;
     * later resolve/reject calls are no-ops on the already-settled promise,
     * though the body still gets drained. Confirm intent before restructuring.
     * @param res HttpClientResponse
     * @param options request options; options.deserializeDates enables the
     *        Date-reviving JSON parse
     */
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        // revive ISO-like date strings into Date objects
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new Error(msg);
                // attach statusCode and body obj (if available) to the error object
                err['statusCode'] = statusCode;
                if (response.result) {
                    err['result'] = response.result;
                }
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
| } |
| exports.HttpClient = HttpClient; |
| |
| |
| /***/ }), |
| |
| /***/ 548: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| /*! |
| * isobject <https://github.com/jonschlinkert/isobject> |
| * |
| * Copyright (c) 2014-2017, Jon Schlinkert. |
| * Released under the MIT License. |
| */ |
| |
// True for any non-null, non-array value of typeof 'object'.
function isObject(val) {
  return val !== null
    && val !== undefined
    && typeof val === 'object'
    && !Array.isArray(val);
}

/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */

// True when the value additionally reports the generic '[object Object]' tag.
function isObjectObject(o) {
  return isObject(o)
    && Object.prototype.toString.call(o) === '[object Object]';
}

// True only for objects created by `{}` / `new Object()`-style construction:
// the constructor must be a function whose prototype is itself object-like
// and carries the Object-specific `isPrototypeOf` method.
function isPlainObject(o) {
  if (!isObjectObject(o)) return false;

  // Reject objects whose constructor was replaced by a non-function.
  const ctor = o.constructor;
  if (typeof ctor !== 'function') return false;

  // Reject objects whose constructor's prototype is not object-like.
  const prot = ctor.prototype;
  if (!isObjectObject(prot)) return false;

  // Require the Object-specific marker method on the prototype.
  if (!prot.hasOwnProperty('isPrototypeOf')) return false;

  // Most likely a plain Object
  return true;
}
| |
| module.exports = isPlainObject; |
| |
| |
| /***/ }), |
| |
| /***/ 568: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| const path = __webpack_require__(622); |
| const niceTry = __webpack_require__(948); |
| const resolveCommand = __webpack_require__(489); |
| const escape = __webpack_require__(462); |
| const readShebang = __webpack_require__(389); |
| const semver = __webpack_require__(48); |
| |
| const isWin = process.platform === 'win32'; |
| const isExecutableRegExp = /\.(?:com|exe)$/i; |
| const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; |
| |
| // `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 |
| const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; |
| |
/**
 * Resolves the parsed command to a file and, when that file starts with a
 * shebang, rewrites the invocation to run the interpreter with the original
 * file as its first argument.
 * @param parsed mutable parsed-command descriptor
 * @returns the file path that will actually be executed
 */
function detectShebang(parsed) {
    parsed.file = resolveCommand(parsed);

    const shebang = parsed.file && readShebang(parsed.file);

    if (!shebang) {
        return parsed.file;
    }

    // Prepend the target file and re-resolve with the interpreter as command.
    parsed.args.unshift(parsed.file);
    parsed.command = shebang;
    return resolveCommand(parsed);
}
| |
/**
 * Windows-only command preparation: resolves shebangs and, unless the target
 * is a native .com/.exe, wraps the invocation in cmd.exe with full metachar
 * escaping. On non-Windows platforms the descriptor is returned untouched.
 * @param parsed mutable parsed-command descriptor (command/args/options)
 * @returns the same descriptor, possibly rewritten to a cmd.exe invocation
 */
function parseNonShell(parsed) {
    if (!isWin) {
        return parsed;
    }

    // Detect & add support for shebangs
    const commandFile = detectShebang(parsed);

    // We don't need a shell if the command filename is an executable
    const needsShell = !isExecutableRegExp.test(commandFile);

    // If a shell is required, use cmd.exe and take care of escaping everything correctly
    // Note that `forceShell` is an hidden option used only in tests
    if (parsed.options.forceShell || needsShell) {
        // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
        // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
        // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
        // we need to double escape them
        const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

        // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
        // This is necessary otherwise it will always fail with ENOENT in those cases
        parsed.command = path.normalize(parsed.command);

        // Escape command & arguments
        parsed.command = escape.command(parsed.command);
        parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

        const shellCommand = [parsed.command].concat(parsed.args).join(' ');

        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.command = process.env.comspec || 'cmd.exe';
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    }

    return parsed;
}
| |
/**
 * Mimics node's `shell: true` option on node versions that lack it, wrapping
 * command + args into a single shell invocation. A no-op when node supports
 * the option natively.
 * @param parsed mutable parsed-command descriptor
 * @returns the same descriptor, possibly rewritten to a shell invocation
 */
function parseShell(parsed) {
    // If node supports the shell option, there's no need to mimic its behavior
    if (supportsShellOption) {
        return parsed;
    }

    // Mimic node shell option
    // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
    const shellCommand = [parsed.command].concat(parsed.args).join(' ');

    if (!isWin) {
        if (typeof parsed.options.shell === 'string') {
            parsed.command = parsed.options.shell;
        } else if (process.platform === 'android') {
            parsed.command = '/system/bin/sh';
        } else {
            parsed.command = '/bin/sh';
        }
        parsed.args = ['-c', shellCommand];
        return parsed;
    }

    parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
    parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
    parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    return parsed;
}
| |
/**
 * Normalizes (command, args, options) into a parsed descriptor and delegates
 * to shell or non-shell parsing. The caller's args/options are cloned and
 * never mutated.
 * @param command command to run
 * @param args argument array, or the options object when omitted
 * @param options spawn options
 */
function parse(command, args, options) {
    // Support parse(command, options), like nodejs does
    if (args && !Array.isArray(args)) {
        options = args;
        args = null;
    }

    const clonedArgs = args ? args.slice(0) : []; // Clone array to avoid changing the original
    const clonedOptions = Object.assign({}, options); // Clone object to avoid changing the original

    // Build our parsed object
    const parsed = {
        command,
        args: clonedArgs,
        options: clonedOptions,
        file: undefined,
        original: {
            command,
            args: clonedArgs,
        },
    };

    // Delegate further parsing to shell or non-shell
    return clonedOptions.shell ? parseShell(parsed) : parseNonShell(parsed);
}
| |
| module.exports = parse; |
| |
| |
| /***/ }), |
| |
| /***/ 605: |
| /***/ (function(module) { |
| |
| module.exports = require("http"); |
| |
| /***/ }), |
| |
| /***/ 611: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| exports.removeDuplicates = exports.isCreatedByUser = exports.isLabelEvent = exports.getOffsetDate = exports.parseOffsetString = exports.validateEnum = exports.formatStrArray = void 0; |
| const enums_1 = __webpack_require__(346); |
| /** |
| * Format a string array into a list |
| * @param strArray string array |
| * @returns string that represents a list |
| * |
| * @example |
| * > toListStr(['a', 'b']) |
| * - a |
| * - b |
| */ |
| function formatStrArray(strArray) { |
| if (strArray.length === 0) { |
| return ''; |
| } |
| return strArray.map(s => `- ${s}`).join('\n') + '\n'; |
| } |
| exports.formatStrArray = formatStrArray; |
| /** |
| * Validate an enum value |
| * @param name name of the variable to check |
| * @param val value to check |
| * @param enumObj enum object |
| * |
| * @example |
| * > enum CD { |
| * C = 'c', |
| * D = 'd', |
| * } |
| * > validateEnums('a', 'b', CD) |
| * Uncaught Error: `a` must be one of ['c', 'd'], but got 'b' |
| */ |
| function validateEnum(name, val, enumObj) { |
| const values = Object.values(enumObj); |
| if (!values.includes(val)) { |
| const wrap = (s) => `'${s}'`; |
| const joined = values.map(wrap).join(', '); |
| throw new Error(`\`${name}\` must be one of [${joined}], but got ${wrap(val)}`); |
| } |
| } |
| exports.validateEnum = validateEnum; |
| /** |
| * Parse a offset string |
| * @param offset offset string (e.g. '1M') |
| * @returns [value, unit] |
| * |
| * @example |
| * > parseOffsetString('1M') |
| * [ 1, 'M' ] |
| */ |
| function parseOffsetString(offsetStr) { |
| const chars = Object.values(enums_1.OffsetUnits).join(''); |
| const pattern = `^(\\d+)([${chars}])$`; |
| const m = new RegExp(pattern).exec(offsetStr); |
| if (m === null) { |
| throw Error(`"${offsetStr}" doesn't match "${pattern}"`); |
| } |
| const value = parseInt(m[1]); |
| const unit = m[2]; |
| return [value, unit]; |
| } |
| exports.parseOffsetString = parseOffsetString; |
| /** |
| * Get a offset date |
| * @param date base date |
| * @param value time value |
| * @param unit time unit (must be one of ['H', 'D', 'M']) |
| * @returns offset date |
| * |
| * @example |
| * > const d = new Date('2020-10-10T10:10:10.000Z') |
| * > getOffsetDate(d, '1H').toISOString() |
| * '2020-10-10T09:10:10.000Z' |
| * |
| * > getOffsetDate(d, '1D').toISOString() |
| * '2020-10-09T10:10:10.000Z' |
| * |
| * > getOffsetDate(d, '1M').toISOString() |
| * '2020-09-10T10:10:10.000Z' |
| */ |
| function getOffsetDate(date, value, unit) { |
| const copied = new Date(date); |
| switch (unit) { |
| case enums_1.OffsetUnits.HOUR: { |
| copied.setHours(copied.getHours() - value); |
| return copied; |
| } |
| case enums_1.OffsetUnits.DAY: { |
| copied.setDate(copied.getDate() - value); |
| return copied; |
| } |
| case enums_1.OffsetUnits.MONTH: { |
| copied.setMonth(copied.getMonth() - value); |
| return copied; |
| } |
| default: { |
| throw Error('Should not reach here'); |
| } |
| } |
| } |
| exports.getOffsetDate = getOffsetDate; |
| /** |
| * Check if a given event is a label event |
| * @param event issue event |
| * @returns true if `event` is a label event otherwise false |
| */ |
| function isLabelEvent(event) { |
| return ['labeled', 'unlabeled'].includes(event.event); |
| } |
| exports.isLabelEvent = isLabelEvent; |
| /** |
| * Check if a given event is created by a user |
| * @param event issue event |
| * @returns true if a given event is created by a user otherwise false |
| */ |
| function isCreatedByUser(event) { |
| return event.actor.type === 'User';; |
| } |
| exports.isCreatedByUser = isCreatedByUser; |
| /** |
| * Remove duplicates in an array |
| * @param array array that may contain duplicates |
| * @returns unique array |
| */ |
| function removeDuplicates(array) { |
| return [...new Set(array)]; |
| } |
| exports.removeDuplicates = removeDuplicates; |
| |
| |
| /***/ }), |
| |
| /***/ 614: |
| /***/ (function(module) { |
| |
| module.exports = require("events"); |
| |
| /***/ }), |
| |
| /***/ 621: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const path = __webpack_require__(622); |
| const pathKey = __webpack_require__(39); |
| |
| module.exports = opts => { |
| opts = Object.assign({ |
| cwd: process.cwd(), |
| path: process.env[pathKey()] |
| }, opts); |
| |
| let prev; |
| let pth = path.resolve(opts.cwd); |
| const ret = []; |
| |
| while (prev !== pth) { |
| ret.push(path.join(pth, 'node_modules/.bin')); |
| prev = pth; |
| pth = path.resolve(pth, '..'); |
| } |
| |
| // ensure the running `node` binary is used |
| ret.push(path.dirname(process.execPath)); |
| |
| return ret.concat(opts.path).join(path.delimiter); |
| }; |
| |
/**
 * Returns a copy of opts.env whose PATH entry is augmented by the main
 * export; the original env object is not mutated.
 */
module.exports.env = opts => {
	opts = Object.assign({
		env: process.env
	}, opts);

	const env = Object.assign({}, opts.env);
	// platform-appropriate PATH key ('Path' on Windows, 'PATH' elsewhere)
	const pathName = pathKey({env});

	opts.path = env[pathName];
	env[pathName] = module.exports(opts);

	return env;
};
| |
| |
| /***/ }), |
| |
| /***/ 622: |
| /***/ (function(module) { |
| |
| module.exports = require("path"); |
| |
| /***/ }), |
| |
| /***/ 626: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| /*! |
| * isobject <https://github.com/jonschlinkert/isobject> |
| * |
| * Copyright (c) 2014-2017, Jon Schlinkert. |
| * Released under the MIT License. |
| */ |
| |
// Non-null, non-array values of typeof 'object'.
function isObject(val) {
  const isNonNull = val !== null && val !== undefined;
  return isNonNull && typeof val === 'object' && !Array.isArray(val);
}

/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */

// Objects that additionally carry the generic '[object Object]' tag.
function isObjectObject(o) {
  if (!isObject(o)) {
    return false;
  }
  return Object.prototype.toString.call(o) === '[object Object]';
}

// Plain objects only: object-tagged, with a function constructor whose
// prototype is itself object-like and owns the Object-specific
// `isPrototypeOf` method.
function isPlainObject(o) {
  if (!isObjectObject(o)) {
    return false;
  }

  const ctor = o.constructor;
  if (typeof ctor !== 'function') {
    return false;
  }

  const prot = ctor.prototype;
  return isObjectObject(prot) && prot.hasOwnProperty('isPrototypeOf');
}
| |
| module.exports = isPlainObject; |
| |
| |
| /***/ }), |
| |
| /***/ 631: |
| /***/ (function(module) { |
| |
| module.exports = require("net"); |
| |
| /***/ }), |
| |
| /***/ 654: |
| /***/ (function(module) { |
| |
| // This is not the set of all possible signals. |
| // |
| // It IS, however, the set of all signals that trigger |
| // an exit on either Linux or BSD systems. Linux is a |
| // superset of the signal names supported on BSD, and |
| // the unknown signals just fail to register, so we can |
| // catch that easily enough. |
| // |
| // Don't bother with SIGKILL. It's uncatchable, which |
| // means that we can't fire any callbacks anyway. |
| // |
| // If a user does happen to register a handler on a non- |
| // fatal signal like SIGWINCH or something, and then |
| // exit, it'll end up firing `process.emit('exit')`, so |
| // the handler will be fired anyway. |
| // |
| // SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised |
| // artificially, inherently leave the process in a |
| // state from which it is not safe to try and enter JS |
| // listeners. |
// Signals that trigger process exit on every supported platform.
const exitSignals = [
  'SIGABRT',
  'SIGALRM',
  'SIGHUP',
  'SIGINT',
  'SIGTERM'
]

if (process.platform !== 'win32') {
  // POSIX-only exit signals; unknown names simply fail to register.
  exitSignals.push(
    'SIGVTALRM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGUSR2',
    'SIGTRAP',
    'SIGSYS',
    'SIGQUIT',
    'SIGIOT'
    // should detect profiler and enable/disable accordingly.
    // see #21
    // 'SIGPROF'
  )
}

if (process.platform === 'linux') {
  // Linux-specific additions.
  exitSignals.push(
    'SIGIO',
    'SIGPOLL',
    'SIGPWR',
    'SIGSTKFLT',
    'SIGUNUSED'
  )
}

module.exports = exitSignals
| |
| |
| /***/ }), |
| |
| /***/ 669: |
| /***/ (function(module) { |
| |
| module.exports = require("util"); |
| |
| /***/ }), |
| |
| /***/ 692: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
/**
 * Error subclass used to signal use of a deprecated API.
 */
class Deprecation extends Error {
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      // trim this constructor frame from the captured stack
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
| |
| exports.Deprecation = Deprecation; |
| |
| |
| /***/ }), |
| |
| /***/ 697: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| module.exports = (promise, onFinally) => { |
| onFinally = onFinally || (() => {}); |
| |
| return promise.then( |
| val => new Promise(resolve => { |
| resolve(onFinally()); |
| }).then(() => val), |
| err => new Promise(resolve => { |
| resolve(onFinally()); |
| }).then(() => { |
| throw err; |
| }) |
| ); |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 742: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| var fs = __webpack_require__(747) |
| var core |
| if (process.platform === 'win32' || global.TESTING_WINDOWS) { |
| core = __webpack_require__(818) |
| } else { |
| core = __webpack_require__(197) |
| } |
| |
| module.exports = isexe |
| isexe.sync = sync |
| |
/**
 * Determines whether `path` is executable, delegating to the platform core.
 * Supports both callback style (isexe(path, cb) / isexe(path, opts, cb)) and
 * Promise style (no callback supplied).
 */
function isexe (path, options, cb) {
  // isexe(path, cb) shorthand
  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  if (!cb) {
    if (typeof Promise !== 'function') {
      throw new TypeError('callback not provided')
    }

    // Promise-style invocation: re-enter with an adapter callback.
    return new Promise(function (resolve, reject) {
      isexe(path, options || {}, function (er, is) {
        return er ? reject(er) : resolve(is)
      })
    })
  }

  core(path, options || {}, function (er, is) {
    // ignore EACCES because that just means we aren't allowed to run it
    if (er && (er.code === 'EACCES' || (options && options.ignoreErrors))) {
      er = null
      is = false
    }
    cb(er, is)
  })
}
| |
/**
 * Synchronous variant of isexe. EACCES — or any error when
 * options.ignoreErrors is set — yields false; other errors are rethrown.
 */
function sync (path, options) {
  try {
    return core.sync(path, options || {})
  } catch (er) {
    const swallow = (options && options.ignoreErrors) || er.code === 'EACCES'
    if (!swallow) {
      throw er
    }
    return false
  }
}
| |
| |
| /***/ }), |
| |
| /***/ 747: |
| /***/ (function(module) { |
| |
| module.exports = require("fs"); |
| |
| /***/ }), |
| |
| /***/ 753: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } |
| |
| var endpoint = __webpack_require__(385); |
| var universalUserAgent = __webpack_require__(796); |
| var isPlainObject = _interopDefault(__webpack_require__(548)); |
| var nodeFetch = _interopDefault(__webpack_require__(454)); |
| var requestError = __webpack_require__(463); |
| |
| const VERSION = "5.4.5"; |
| |
/**
 * Resolves a fetch Response body as an ArrayBuffer (used for binary
 * content types).
 */
function getBufferResponse(res) {
  return res.arrayBuffer();
}
| |
/**
 * Executes an octokit request via fetch and normalizes the outcome to
 * { status, url, headers, data }; HTTP failures and transport errors are
 * converted into RequestError rejections.
 * @param requestOptions parsed endpoint options (method, url, headers,
 *        body, redirect, request)
 */
function fetchWrapper(requestOptions) {
  // Serialize plain-object/array bodies; other body types pass through.
  if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  let headers = {};
  let status;
  let url;
  // callers may inject a custom fetch via request.fetch (e.g. for tests)
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    // copy response headers into a plain object for the result
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    // No Content / Reset Content: nothing to parse.
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests


    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    if (status >= 400) {
      // read the body text and try to enrich the error with parsed details
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format

          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    // any other content type is returned as an ArrayBuffer
    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // wrap transport-level failures so callers always see RequestError
    if (error instanceof requestError.RequestError) {
      throw error;
    }

    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
| |
/**
 * Creates a request function bound to the given endpoint defaults. The
 * returned function exposes `.endpoint` and `.defaults` for further
 * chaining, and honors a per-request hook when one is configured.
 * @param oldEndpoint endpoint to derive defaults from
 * @param newDefaults defaults to merge in
 */
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);

    // fast path: no hook registered — dispatch directly
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    // hook path: hand the hook a minimal request function to wrap
    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };

    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}
| |
| const request = withDefaults(endpoint.endpoint, { |
| headers: { |
| "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` |
| } |
| }); |
| |
| exports.request = request; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 761: |
| /***/ (function(module) { |
| |
// Webpack external: re-export Node.js' built-in "zlib" module.
module.exports = require("zlib");
| |
| /***/ }), |
| |
| /***/ 763: |
| /***/ (function(module) { |
| |
| module.exports = removeHook |
| |
/**
 * Remove a previously registered hook `method` from `state.registry[name]`.
 * Matches on the originally registered function (`entry.orig`).
 * Silently does nothing when the name is unknown or the method is not
 * registered.
 */
function removeHook (state, name, method) {
  const registered = state.registry[name]
  if (!registered) {
    return
  }

  const index = registered.findIndex(function (entry) {
    return entry.orig === method
  })

  if (index === -1) {
    return
  }

  registered.splice(index, 1)
}
| |
| |
| /***/ }), |
| |
| /***/ 768: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| module.exports = function (x) { |
| var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); |
| var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); |
| |
| if (x[x.length - 1] === lf) { |
| x = x.slice(0, x.length - 1); |
| } |
| |
| if (x[x.length - 1] === cr) { |
| x = x.slice(0, x.length - 1); |
| } |
| |
| return x; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 794: |
| /***/ (function(module) { |
| |
// Webpack external: re-export Node.js' built-in "stream" module.
module.exports = require("stream");
| |
| /***/ }), |
| |
| /***/ 796: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Unwrap a transpiled ES-module namespace object: prefer its `default`
// export when present, otherwise return the value unchanged.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex.default;
  }
  return ex;
}
| |
| var osName = _interopDefault(__webpack_require__(2)); |
| |
// Describe the current runtime as "Node.js/<version> (<os>; <arch>)".
// `osName()` can throw (e.g. it shells out via wmic on Windows); any
// failure is converted into a placeholder instead of crashing callers.
function getUserAgent() {
  try {
    return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
  } catch (error) {
    const isWmicFailure = /wmic os get Caption/.test(error.message);
    return isWmicFailure ? "Windows <version undetectable>" : "<environment undetectable>";
  }
}
| |
| exports.getUserAgent = getUserAgent; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 813: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
// Classify the token and wrap it in an auth result object:
// three dot-separated parts => JSON Web Token ("app"), a "v<digits>."
// prefix => installation token, anything else => OAuth token.
async function auth(token) {
  let tokenType;
  if (token.split(/\./).length === 3) {
    tokenType = "app";
  } else if (/^v\d+\./.test(token)) {
    tokenType = "installation";
  } else {
    tokenType = "oauth";
  }

  return {
    type: "token",
    token,
    tokenType
  };
}
| |
| /** |
| * Prefix token for usage in the Authorization header |
| * |
| * @param token OAuth token or JSON Web Token |
| */ |
| function withAuthorizationPrefix(token) { |
| if (token.split(/\./).length === 3) { |
| return `bearer ${token}`; |
| } |
| |
| return `token ${token}`; |
| } |
| |
// Request-lifecycle hook: resolve route/parameters into endpoint
// options, inject the Authorization header for `token`, then perform
// the request.
async function hook(token, request, route, parameters) {
  const endpoint = request.endpoint.merge(route, parameters);
  const authorization = withAuthorizationPrefix(token);
  endpoint.headers.authorization = authorization;
  return request(endpoint);
}
| |
// Factory: validate the token, strip any leading "token "/"bearer "
// prefix, and return an `auth()` function that also carries a bound
// request lifecycle `hook`.
const createTokenAuth = function createTokenAuth(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }

  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }

  const normalizedToken = token.replace(/^(token|bearer) +/i, "");
  const authenticate = auth.bind(null, normalizedToken);
  authenticate.hook = hook.bind(null, normalizedToken);
  return authenticate;
};
| |
| exports.createTokenAuth = createTokenAuth; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 814: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = which |
| which.sync = whichSync |
| |
| var isWindows = process.platform === 'win32' || |
| process.env.OSTYPE === 'cygwin' || |
| process.env.OSTYPE === 'msys' |
| |
| var path = __webpack_require__(622) |
| var COLON = isWindows ? ';' : ':' |
| var isexe = __webpack_require__(742) |
| |
// Build the ENOENT error reported when `cmd` cannot be located.
function getNotFoundError (cmd) {
  return Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
}
| |
// Resolve the search locations and candidate extensions for `cmd`.
// Returns { env: directories to search, ext: extensions to try,
// extExe: the raw PATHEXT string (empty off Windows) }.
function getPathInfo (cmd, opt) {
  const colon = opt.colon || COLON

  // Commands containing a path separator are resolved as-is rather
  // than searched for along PATH.
  const hasSlash = cmd.match(/\//) || (isWindows && cmd.match(/\\/))

  let pathEnv = (opt.path || process.env.PATH || '').split(colon)
  let pathExt = ['']
  let pathExtExe = ''

  if (isWindows) {
    // Windows also resolves against the current directory.
    pathEnv.unshift(process.cwd())
    pathExtExe = opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM'
    pathExt = pathExtExe.split(colon)

    // Always test the cmd itself first. isexe will check to make sure
    // it's found in the pathExt set.
    if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') {
      pathExt.unshift('')
    }
  }

  if (hasSlash) {
    pathEnv = ['']
  }

  return {
    env: pathEnv,
    ext: pathExt,
    extExe: pathExtExe
  }
}
| |
// Asynchronously locate `cmd` on the PATH, callback style:
// which(cmd, [opt], cb) -> cb(err, path) or cb(err, [paths]) with opt.all.
// Calls back with an ENOENT error when no executable match exists.
function which (cmd, opt, cb) {
  // Allow the options argument to be omitted.
  if (typeof opt === 'function') {
    cb = opt
    opt = {}
  }

  var info = getPathInfo(cmd, opt)
  var pathEnv = info.env
  var pathExt = info.ext
  var pathExtExe = info.extExe
  var found = []

  // Outer loop F: one iteration per PATH directory.
  ;(function F (i, l) {
    if (i === l) {
      // Every directory exhausted: report matches or "not found".
      if (opt.all && found.length)
        return cb(null, found)
      else
        return cb(getNotFoundError(cmd))
    }

    var pathPart = pathEnv[i]
    // PATH entries may be quoted (Windows); strip surrounding quotes.
    if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
      pathPart = pathPart.slice(1, -1)

    var p = path.join(pathPart, cmd)
    // Preserve an explicit "./" or ".\" prefix that path.join strips.
    if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
      p = cmd.slice(0, 2) + p
    }
    // Inner loop E: one iteration per candidate extension.
    ;(function E (ii, ll) {
      if (ii === ll) return F(i + 1, l)
      var ext = pathExt[ii]
      isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
        if (!er && is) {
          if (opt.all)
            found.push(p + ext)
          else
            // First match wins unless collecting all of them.
            return cb(null, p + ext)
        }
        return E(ii + 1, ll)
      })
    })(0, pathExt.length)
  })(0, pathEnv.length)
}
| |
// Synchronously locate `cmd` on the PATH.
// Returns the first executable match, or every match when `opt.all` is
// set. Throws ENOENT when nothing matches, unless `opt.nothrow` is set
// (then returns null).
function whichSync (cmd, opt) {
  opt = opt || {}

  const info = getPathInfo(cmd, opt)
  const pathExtExe = info.extExe
  const found = []

  for (const rawPart of info.env) {
    // PATH entries may be quoted (Windows); strip surrounding quotes.
    const pathPart = (rawPart.charAt(0) === '"' && rawPart.slice(-1) === '"')
      ? rawPart.slice(1, -1)
      : rawPart

    let p = path.join(pathPart, cmd)
    // Preserve an explicit "./" or ".\" prefix that path.join strips.
    if (!pathPart && /^\.[\\\/]/.test(cmd)) {
      p = cmd.slice(0, 2) + p
    }

    for (const ext of info.ext) {
      const candidate = p + ext
      try {
        if (isexe.sync(candidate, { pathExt: pathExtExe })) {
          if (!opt.all) {
            return candidate
          }
          found.push(candidate)
        }
      } catch (ex) {
        // Unreadable or odd entries are skipped, matching the async path.
      }
    }
  }

  if (opt.all && found.length) {
    return found
  }

  if (opt.nothrow) {
    return null
  }

  throw getNotFoundError(cmd)
}
| |
| |
| /***/ }), |
| |
| /***/ 816: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
// Matches a shebang line ("#!...") at the start of a script.
module.exports = /^#!.*/;
| |
| |
| /***/ }), |
| |
| /***/ 818: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| module.exports = isexe |
| isexe.sync = sync |
| |
| var fs = __webpack_require__(747) |
| |
// Decide whether `path` ends with one of the ";"-separated extensions
// in options.pathExt (falling back to the PATHEXT env var). A falsy
// extension list, or a list containing an empty entry, accepts any path.
function checkPathExt (path, options) {
  const pathext = options.pathExt !== undefined
    ? options.pathExt
    : process.env.PATHEXT

  if (!pathext) {
    return true
  }

  const entries = pathext.split(';')
  if (entries.indexOf('') !== -1) {
    return true
  }

  return entries.some(function (entry) {
    const p = entry.toLowerCase()
    return Boolean(p) && path.substr(-p.length).toLowerCase() === p
  })
}
| |
// A path is executable (Windows semantics) when its stat describes a
// regular file or symlink AND its name carries an approved extension.
function checkStat (stat, path, options) {
  const isFileLike = stat.isSymbolicLink() || stat.isFile()
  return isFileLike ? checkPathExt(path, options) : false
}
| |
// Async executability check: stat the path and report via callback.
// Stat errors are surfaced as (er, false).
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    const executable = er ? false : checkStat(stat, path, options)
    cb(er, executable)
  })
}
| |
// Synchronous executability check; throws when the path cannot be stat'd.
function sync (path, options) {
  const stat = fs.statSync(path)
  return checkStat(stat, path, options)
}
| |
| |
| /***/ }), |
| |
| /***/ 835: |
| /***/ (function(module) { |
| |
// Webpack external: re-export Node.js' built-in "url" module.
module.exports = require("url");
| |
| /***/ }), |
| |
| /***/ 842: |
| /***/ (function(__unusedmodule, exports) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| const Endpoints = { |
| actions: { |
| addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], |
| cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], |
| createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], |
| createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { |
| renamedParameters: { |
| name: "secret_name" |
| } |
| }], |
| createOrUpdateSecretForRepo: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { |
| renamed: ["actions", "createOrUpdateRepoSecret"], |
| renamedParameters: { |
| name: "secret_name" |
| } |
| }], |
| createRegistrationToken: ["POST /repos/{owner}/{repo}/actions/runners/registration-token", {}, { |
| renamed: ["actions", "createRegistrationTokenForRepo"] |
| }], |
| createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], |
| createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], |
| createRemoveToken: ["POST /repos/{owner}/{repo}/actions/runners/remove-token", {}, { |
| renamed: ["actions", "createRemoveTokenForRepo"] |
| }], |
| createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], |
| createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], |
| deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], |
| deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], |
| deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { |
| renamedParameters: { |
| name: "secret_name" |
| } |
| }], |
| deleteSecretFromRepo: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { |
| renamed: ["actions", "deleteRepoSecret"], |
| renamedParameters: { |
| name: "secret_name" |
| } |
| }], |
| deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], |
| deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], |
| deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], |
| downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], |
| downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], |
| downloadWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { |
| renamed: ["actions", "downloadJobLogsForWorkflowRun"] |
| }], |
| downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], |
| getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], |
| getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], |
| getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], |
| getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], |
| getPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key", {}, { |
| renamed: ["actions", "getRepoPublicKey"] |
| }], |
| getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], |
| getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { |
| renamedParameters: { |
| name: "secret_name" |
| } |
| }], |
| getSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { |
| renamed: ["actions", "getRepoSecret"], |
| renamedParameters: { |
| name: "secret_name" |
| } |
| }], |
| getSelfHostedRunner: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { |
| renamed: ["actions", "getSelfHostedRunnerForRepo"] |
| }], |
| getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], |
| getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], |
| getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], |
| getWorkflowJob: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}", {}, { |
| renamed: ["actions", "getJobForWorkflowRun"] |
| }], |
| getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], |
| getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], |
| getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], |
| listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], |
| listDownloadsForSelfHostedRunnerApplication: ["GET /repos/{owner}/{repo}/actions/runners/downloads", {}, { |
| renamed: ["actions", "listRunnerApplicationsForRepo"] |
| }], |
| listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], |
| listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], |
| listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], |
| listRepoWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/runs", {}, { |
| renamed: ["actions", "listWorkflowRunsForRepo"] |
| }], |
| listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], |
| listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], |
| listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], |
| listSecretsForRepo: ["GET /repos/{owner}/{repo}/actions/secrets", {}, { |
| renamed: ["actions", "listRepoSecrets"] |
| }], |
| listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], |
| listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], |
| listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], |
| listWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { |
| renamed: ["actions", "downloadWorkflowJobLogs"] |
| }], |
| listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], |
| listWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", {}, { |
| renamed: ["actions", "downloadWorkflowRunLogs"] |
| }], |
| listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], |
| listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], |
| reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], |
| removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], |
| removeSelfHostedRunner: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { |
| renamed: ["actions", "deleteSelfHostedRunnerFromRepo"] |
| }], |
| setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] |
| }, |
| activity: { |
| checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], |
| checkStarringRepo: ["GET /user/starred/{owner}/{repo}", {}, { |
| renamed: ["activity", "checkRepoIsStarredByAuthenticatedUser"] |
| }], |
| deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], |
| deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], |
| getFeeds: ["GET /feeds"], |
| getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], |
| getThread: ["GET /notifications/threads/{thread_id}"], |
| getThreadSubscription: ["PUT /notifications", {}, { |
| renamed: ["activity", "getThreadSubscriptionForAuthenticatedUser"] |
| }], |
| getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], |
| listEventsForAuthenticatedUser: ["GET /users/{username}/events"], |
| listEventsForOrg: ["GET /users/{username}/events/orgs/{org}", {}, { |
| renamed: ["activity", "listOrgEventsForAuthenticatedUser"] |
| }], |
| listEventsForUser: ["GET /users/{username}/events", {}, { |
| renamed: ["activity", "listEventsForAuthenticatedUser"] |
| }], |
| listFeeds: ["GET /feeds", {}, { |
| renamed: ["activity", "getFeeds"] |
| }], |
| listNotifications: ["GET /notifications", {}, { |
| renamed: ["activity", "listNotificationsForAuthenticatedUser"] |
| }], |
| listNotificationsForAuthenticatedUser: ["GET /notifications"], |
| listNotificationsForRepo: ["GET /repos/{owner}/{repo}/notifications", {}, { |
| renamed: ["activity", "listRepoNotificationsForAuthenticatedUser"] |
| }], |
| listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], |
| listPublicEvents: ["GET /events"], |
| listPublicEventsForOrg: ["GET /orgs/{org}/events", {}, { |
| renamed: ["activity", "listPublicOrgEvents"] |
| }], |
| listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], |
| listPublicEventsForUser: ["GET /users/{username}/events/public"], |
| listPublicOrgEvents: ["GET /orgs/{org}/events"], |
| listReceivedEventsForUser: ["GET /users/{username}/received_events"], |
| listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], |
| listRepoEvents: ["GET /repos/{owner}/{repo}/events"], |
| listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], |
| listReposStarredByAuthenticatedUser: ["GET /user/starred"], |
| listReposStarredByUser: ["GET /users/{username}/starred"], |
| listReposWatchedByUser: ["GET /users/{username}/subscriptions"], |
| listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], |
| listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], |
| listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], |
| markAsRead: ["PUT /notifications", {}, { |
| renamed: ["activity", "markNotificationsAsRead"] |
| }], |
| markNotificationsAsRead: ["PUT /notifications"], |
| markNotificationsAsReadForRepo: ["PUT /repos/{owner}/{repo}/notifications", {}, { |
| renamed: ["activity", "markRepoNotificationsAsRead"] |
| }], |
| markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], |
| markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], |
| setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], |
| setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], |
| starRepo: ["PUT /user/starred/{owner}/{repo}", {}, { |
| renamed: ["activity", "starRepoForAuthenticatedUser"] |
| }], |
| starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], |
| unstarRepo: ["DELETE /user/starred/{owner}/{repo}", {}, { |
| renamed: ["activity", "unstarRepoForAuthenticatedUser"] |
| }], |
| unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] |
| }, |
| apps: { |
| addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| checkAccountIsAssociatedWithAny: ["GET /marketplace_listing/accounts/{account_id}", {}, { |
| renamed: ["apps", "getSubscriptionPlanForAccount"] |
| }], |
| checkAccountIsAssociatedWithAnyStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}", {}, { |
| renamed: ["apps", "getSubscriptionPlanForAccountStubbed"] |
| }], |
| checkToken: ["POST /applications/{client_id}/token"], |
| createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { |
| mediaType: { |
| previews: ["corsair"] |
| } |
| }], |
| createFromManifest: ["POST /app-manifests/{code}/conversions"], |
| createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| createInstallationToken: ["POST /app/installations/{installation_id}/access_tokens", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }, { |
| renamed: ["apps", "createInstallationAccessToken"] |
| }], |
| deleteAuthorization: ["DELETE /applications/{client_id}/grant"], |
| deleteInstallation: ["DELETE /app/installations/{installation_id}", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| deleteToken: ["DELETE /applications/{client_id}/token"], |
| getAuthenticated: ["GET /app", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| getBySlug: ["GET /apps/{app_slug}", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| getInstallation: ["GET /app/installations/{installation_id}", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| getOrgInstallation: ["GET /orgs/{org}/installation", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], |
| getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], |
| getUserInstallation: ["GET /users/{username}/installation", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], |
| listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], |
| listAccountsUserOrOrgOnPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts", {}, { |
| renamed: ["apps", "listAccountsForPlan"] |
| }], |
| listAccountsUserOrOrgOnPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", {}, { |
| renamed: ["apps", "listAccountsForPlanStubbed"] |
| }], |
| listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| listInstallations: ["GET /app/installations", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| listInstallationsForAuthenticatedUser: ["GET /user/installations", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| listMarketplacePurchasesForAuthenticatedUser: ["GET /user/marketplace_purchases", {}, { |
| renamed: ["apps", "listSubscriptionsForAuthenticatedUser"] |
| }], |
| listMarketplacePurchasesForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed", {}, { |
| renamed: ["apps", "listSubscriptionsForAuthenticatedUserStubbed"] |
| }], |
| listPlans: ["GET /marketplace_listing/plans"], |
| listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], |
| listRepos: ["GET /installation/repositories", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }, { |
| renamed: ["apps", "listReposAccessibleToInstallation"] |
| }], |
| listReposAccessibleToInstallation: ["GET /installation/repositories", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], |
| listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], |
| removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| resetToken: ["PATCH /applications/{client_id}/token"], |
| revokeInstallationAccessToken: ["DELETE /installation/token"], |
| revokeInstallationToken: ["DELETE /installation/token", {}, { |
| renamed: ["apps", "revokeInstallationAccessToken"] |
| }], |
| suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], |
| unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] |
| }, |
| checks: { |
| create: ["POST /repos/{owner}/{repo}/check-runs", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| createSuite: ["POST /repos/{owner}/{repo}/check-suites", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }], |
| update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { |
| mediaType: { |
| previews: ["antiope"] |
| } |
| }] |
| }, |
| codeScanning: { |
| getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], |
| listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] |
| }, |
| codesOfConduct: { |
| getAllCodesOfConduct: ["GET /codes_of_conduct", { |
| mediaType: { |
| previews: ["scarlet-witch"] |
| } |
| }], |
| getConductCode: ["GET /codes_of_conduct/{key}", { |
| mediaType: { |
| previews: ["scarlet-witch"] |
| } |
| }], |
| getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { |
| mediaType: { |
| previews: ["scarlet-witch"] |
| } |
| }], |
| listConductCodes: ["GET /codes_of_conduct", { |
| mediaType: { |
| previews: ["scarlet-witch"] |
| } |
| }, { |
| renamed: ["codesOfConduct", "getAllCodesOfConduct"] |
| }] |
| }, |
| emojis: { |
| get: ["GET /emojis"] |
| }, |
| gists: { |
| checkIsStarred: ["GET /gists/{gist_id}/star"], |
| create: ["POST /gists"], |
| createComment: ["POST /gists/{gist_id}/comments"], |
| delete: ["DELETE /gists/{gist_id}"], |
| deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], |
| fork: ["POST /gists/{gist_id}/forks"], |
| get: ["GET /gists/{gist_id}"], |
| getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], |
| getRevision: ["GET /gists/{gist_id}/{sha}"], |
| list: ["GET /gists"], |
| listComments: ["GET /gists/{gist_id}/comments"], |
| listCommits: ["GET /gists/{gist_id}/commits"], |
| listForUser: ["GET /users/{username}/gists"], |
| listForks: ["GET /gists/{gist_id}/forks"], |
| listPublic: ["GET /gists/public"], |
| listPublicForUser: ["GET /users/{username}/gists", {}, { |
| renamed: ["gists", "listForUser"] |
| }], |
| listStarred: ["GET /gists/starred"], |
| star: ["PUT /gists/{gist_id}/star"], |
| unstar: ["DELETE /gists/{gist_id}/star"], |
| update: ["PATCH /gists/{gist_id}"], |
| updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] |
| }, |
| git: { |
| createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], |
| createCommit: ["POST /repos/{owner}/{repo}/git/commits"], |
| createRef: ["POST /repos/{owner}/{repo}/git/refs"], |
| createTag: ["POST /repos/{owner}/{repo}/git/tags"], |
| createTree: ["POST /repos/{owner}/{repo}/git/trees"], |
| deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], |
| getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], |
| getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], |
| getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], |
| getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], |
| getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], |
| listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], |
| updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] |
| }, |
| gitignore: { |
| getAllTemplates: ["GET /gitignore/templates"], |
| getTemplate: ["GET /gitignore/templates/{name}"], |
| listTemplates: ["GET /gitignore/templates", {}, { |
| renamed: ["gitignore", "getAllTemplates"] |
| }] |
| }, |
| interactions: { |
| addOrUpdateRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }, { |
| renamed: ["interactions", "setRestrictionsForOrg"] |
| }], |
| addOrUpdateRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }, { |
| renamed: ["interactions", "setRestrictionsForRepo"] |
| }], |
| getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }], |
| getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }], |
| removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }], |
| removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }], |
| setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }], |
| setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { |
| mediaType: { |
| previews: ["sombra"] |
| } |
| }] |
| }, |
| issues: { |
| addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], |
| addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], |
| checkAssignee: ["GET /repos/{owner}/{repo}/assignees/{assignee}", {}, { |
| renamed: ["issues", "checkUserCanBeAssigned"] |
| }], |
| checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], |
| create: ["POST /repos/{owner}/{repo}/issues"], |
| createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], |
| createLabel: ["POST /repos/{owner}/{repo}/labels"], |
| createMilestone: ["POST /repos/{owner}/{repo}/milestones"], |
| deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], |
| deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], |
| deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], |
| get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], |
| getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], |
| getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], |
| getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], |
| getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], |
| list: ["GET /issues"], |
| listAssignees: ["GET /repos/{owner}/{repo}/assignees"], |
| listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], |
| listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], |
| listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], |
| listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], |
| listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { |
| mediaType: { |
| previews: ["mockingbird"] |
| } |
| }], |
| listForAuthenticatedUser: ["GET /user/issues"], |
| listForOrg: ["GET /orgs/{org}/issues"], |
| listForRepo: ["GET /repos/{owner}/{repo}/issues"], |
| listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], |
| listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], |
| listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], |
| listMilestones: ["GET /repos/{owner}/{repo}/milestones"], |
| listMilestonesForRepo: ["GET /repos/{owner}/{repo}/milestones", {}, { |
| renamed: ["issues", "listMilestones"] |
| }], |
| lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], |
| removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], |
| removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], |
| removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], |
| removeLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { |
| renamed: ["issues", "removeAllLabels"] |
| }], |
| replaceAllLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { |
| renamed: ["issues", "setLabels"] |
| }], |
| replaceLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { |
| renamed: ["issues", "replaceAllLabels"] |
| }], |
| setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], |
| unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], |
| update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], |
| updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], |
| updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], |
| updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] |
| }, |
| licenses: { |
| get: ["GET /licenses/{license}"], |
| getAllCommonlyUsed: ["GET /licenses"], |
| getForRepo: ["GET /repos/{owner}/{repo}/license"], |
| listCommonlyUsed: ["GET /licenses", {}, { |
| renamed: ["licenses", "getAllCommonlyUsed"] |
| }] |
| }, |
| markdown: { |
| render: ["POST /markdown"], |
| renderRaw: ["POST /markdown/raw", { |
| headers: { |
| "content-type": "text/plain; charset=utf-8" |
| } |
| }] |
| }, |
| meta: { |
| get: ["GET /meta"] |
| }, |
| migrations: { |
| cancelImport: ["DELETE /repos/{owner}/{repo}/import"], |
| deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], |
| getImportProgress: ["GET /repos/{owner}/{repo}/import", {}, { |
| renamed: ["migrations", "getImportStatus"] |
| }], |
| getImportStatus: ["GET /repos/{owner}/{repo}/import"], |
| getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], |
| getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| listForAuthenticatedUser: ["GET /user/migrations", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| listForOrg: ["GET /orgs/{org}/migrations", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| listReposForUser: ["GET /user/{migration_id}/repositories", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], |
| setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], |
| startForAuthenticatedUser: ["POST /user/migrations"], |
| startForOrg: ["POST /orgs/{org}/migrations"], |
| startImport: ["PUT /repos/{owner}/{repo}/import"], |
| unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { |
| mediaType: { |
| previews: ["wyandotte"] |
| } |
| }], |
| updateImport: ["PATCH /repos/{owner}/{repo}/import"] |
| }, |
| orgs: { |
| addOrUpdateMembership: ["PUT /orgs/{org}/memberships/{username}", {}, { |
| renamed: ["orgs", "setMembershipForUser"] |
| }], |
| blockUser: ["PUT /orgs/{org}/blocks/{username}"], |
| checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], |
| checkMembership: ["GET /orgs/{org}/members/{username}", {}, { |
| renamed: ["orgs", "checkMembershipForUser"] |
| }], |
| checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], |
| checkPublicMembership: ["GET /orgs/{org}/public_members/{username}", {}, { |
| renamed: ["orgs", "checkPublicMembershipForUser"] |
| }], |
| checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], |
| concealMembership: ["DELETE /orgs/{org}/public_members/{username}", {}, { |
| renamed: ["orgs", "removePublicMembershipForAuthenticatedUser"] |
| }], |
| convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], |
| createHook: ["POST /orgs/{org}/hooks", {}, { |
| renamed: ["orgs", "createWebhook"] |
| }], |
| createInvitation: ["POST /orgs/{org}/invitations"], |
| createWebhook: ["POST /orgs/{org}/hooks"], |
| deleteHook: ["DELETE /orgs/{org}/hooks/{hook_id}", {}, { |
| renamed: ["orgs", "deleteWebhook"] |
| }], |
| deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], |
| get: ["GET /orgs/{org}"], |
| getHook: ["GET /orgs/{org}/hooks/{hook_id}", {}, { |
| renamed: ["orgs", "getWebhook"] |
| }], |
| getMembership: ["GET /orgs/{org}/memberships/{username}", {}, { |
| renamed: ["orgs", "getMembershipForUser"] |
| }], |
| getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], |
| getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], |
| getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], |
| list: ["GET /organizations"], |
| listAppInstallations: ["GET /orgs/{org}/installations", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }], |
| listBlockedUsers: ["GET /orgs/{org}/blocks"], |
| listForAuthenticatedUser: ["GET /user/orgs"], |
| listForUser: ["GET /users/{username}/orgs"], |
| listHooks: ["GET /orgs/{org}/hooks", {}, { |
| renamed: ["orgs", "listWebhooks"] |
| }], |
| listInstallations: ["GET /orgs/{org}/installations", { |
| mediaType: { |
| previews: ["machine-man"] |
| } |
| }, { |
| renamed: ["orgs", "listAppInstallations"] |
| }], |
| listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], |
| listMembers: ["GET /orgs/{org}/members"], |
| listMemberships: ["GET /user/memberships/orgs", {}, { |
| renamed: ["orgs", "listMembershipsForAuthenticatedUser"] |
| }], |
| listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], |
| listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], |
| listPendingInvitations: ["GET /orgs/{org}/invitations"], |
| listPublicMembers: ["GET /orgs/{org}/public_members"], |
| listWebhooks: ["GET /orgs/{org}/hooks"], |
| pingHook: ["POST /orgs/{org}/hooks/{hook_id}/pings", {}, { |
| renamed: ["orgs", "pingWebhook"] |
| }], |
| pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], |
| publicizeMembership: ["PUT /orgs/{org}/public_members/{username}", {}, { |
| renamed: ["orgs", "setPublicMembershipForAuthenticatedUser"] |
| }], |
| removeMember: ["DELETE /orgs/{org}/members/{username}"], |
| removeMembership: ["DELETE /orgs/{org}/memberships/{username}", {}, { |
| renamed: ["orgs", "removeMembershipForUser"] |
| }], |
| removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], |
| removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], |
| removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], |
| setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], |
| setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], |
| unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], |
| update: ["PATCH /orgs/{org}"], |
| updateHook: ["PATCH /orgs/{org}/hooks/{hook_id}", {}, { |
| renamed: ["orgs", "updateWebhook"] |
| }], |
| updateMembership: ["PATCH /user/memberships/orgs/{org}", {}, { |
| renamed: ["orgs", "updateMembershipForAuthenticatedUser"] |
| }], |
| updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], |
| updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] |
| }, |
| projects: { |
| addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| createCard: ["POST /projects/columns/{column_id}/cards", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| createColumn: ["POST /projects/{project_id}/columns", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| createForAuthenticatedUser: ["POST /user/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| createForOrg: ["POST /orgs/{org}/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| createForRepo: ["POST /repos/{owner}/{repo}/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| delete: ["DELETE /projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| deleteCard: ["DELETE /projects/columns/cards/{card_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| deleteColumn: ["DELETE /projects/columns/{column_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| get: ["GET /projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| getCard: ["GET /projects/columns/cards/{card_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| getColumn: ["GET /projects/columns/{column_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listCards: ["GET /projects/columns/{column_id}/cards", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listCollaborators: ["GET /projects/{project_id}/collaborators", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listColumns: ["GET /projects/{project_id}/columns", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listForOrg: ["GET /orgs/{org}/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listForRepo: ["GET /repos/{owner}/{repo}/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listForUser: ["GET /users/{username}/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| moveCard: ["POST /projects/columns/cards/{card_id}/moves", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| moveColumn: ["POST /projects/columns/{column_id}/moves", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| reviewUserPermissionLevel: ["GET /projects/{project_id}/collaborators/{username}/permission", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }, { |
| renamed: ["projects", "getPermissionForUser"] |
| }], |
| update: ["PATCH /projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| updateCard: ["PATCH /projects/columns/cards/{card_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| updateColumn: ["PATCH /projects/columns/{column_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }] |
| }, |
| pulls: { |
| checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], |
| create: ["POST /repos/{owner}/{repo}/pulls"], |
| createComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { |
| renamed: ["pulls", "createReviewComment"] |
| }], |
| createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], |
| createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], |
| createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], |
| createReviewCommentReply: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", {}, { |
| renamed: ["pulls", "createReplyForReviewComment"] |
| }], |
| createReviewRequest: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { |
| renamed: ["pulls", "requestReviewers"] |
| }], |
| deleteComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { |
| renamed: ["pulls", "deleteReviewComment"] |
| }], |
| deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], |
| deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], |
| deleteReviewRequest: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { |
| renamed: ["pulls", "removeRequestedReviewers"] |
| }], |
| dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], |
| get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], |
| getComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { |
| renamed: ["pulls", "getReviewComment"] |
| }], |
| getCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", {}, { |
| renamed: ["pulls", "listCommentsForReview"] |
| }], |
| getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], |
| getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], |
| list: ["GET /repos/{owner}/{repo}/pulls"], |
| listComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { |
| renamed: ["pulls", "listReviewComments"] |
| }], |
| listCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments", {}, { |
| renamed: ["pulls", "listReviewCommentsForRepo"] |
| }], |
| listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], |
| listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], |
| listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], |
| listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], |
| listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], |
| listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], |
| listReviewRequests: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { |
| renamed: ["pulls", "listRequestedReviewers"] |
| }], |
| listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], |
| merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], |
| removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], |
| requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], |
| submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], |
| update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], |
| updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { |
| mediaType: { |
| previews: ["lydian"] |
| } |
| }], |
| updateComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { |
| renamed: ["pulls", "updateReviewComment"] |
| }], |
| updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], |
| updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] |
| }, |
| rateLimit: { |
| get: ["GET /rate_limit"] |
| }, |
| reactions: { |
| createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| delete: ["DELETE /reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }, { |
| renamed: ["reactions", "deleteLegacy"] |
| }], |
| deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| deleteLegacy: ["DELETE /reactions/{reaction_id}", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }, { |
| deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" |
| }], |
| listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }], |
| listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { |
| mediaType: { |
| previews: ["squirrel-girl"] |
| } |
| }] |
| }, |
| repos: { |
| acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], |
| addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { |
| mapToData: "apps" |
| }], |
| addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], |
| addDeployKey: ["POST /repos/{owner}/{repo}/keys", {}, { |
| renamed: ["repos", "createDeployKey"] |
| }], |
| addProtectedBranchAdminEnforcement: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { |
| renamed: ["repos", "setAdminBranchProtection"] |
| }], |
| addProtectedBranchAppRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { |
| mapToData: "apps", |
| renamed: ["repos", "addAppAccessRestrictions"] |
| }], |
| addProtectedBranchRequiredSignatures: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { |
| mediaType: { |
| previews: ["zzzax"] |
| } |
| }, { |
| renamed: ["repos", "createCommitSignatureProtection"] |
| }], |
| addProtectedBranchRequiredStatusChecksContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| mapToData: "contexts", |
| renamed: ["repos", "addStatusCheckContexts"] |
| }], |
| addProtectedBranchTeamRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { |
| mapToData: "teams", |
| renamed: ["repos", "addTeamAccessRestrictions"] |
| }], |
| addProtectedBranchUserRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { |
| mapToData: "users", |
| renamed: ["repos", "addUserAccessRestrictions"] |
| }], |
| addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| mapToData: "contexts" |
| }], |
| addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { |
| mapToData: "teams" |
| }], |
| addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { |
| mapToData: "users" |
| }], |
| checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], |
| checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { |
| mediaType: { |
| previews: ["dorian"] |
| } |
| }], |
| compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], |
| createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], |
| createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { |
| mediaType: { |
| previews: ["zzzax"] |
| } |
| }], |
| createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], |
| createDeployKey: ["POST /repos/{owner}/{repo}/keys"], |
| createDeployment: ["POST /repos/{owner}/{repo}/deployments"], |
| createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], |
| createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], |
| createForAuthenticatedUser: ["POST /user/repos"], |
| createFork: ["POST /repos/{owner}/{repo}/forks"], |
| createHook: ["POST /repos/{owner}/{repo}/hooks", {}, { |
| renamed: ["repos", "createWebhook"] |
| }], |
| createInOrg: ["POST /orgs/{org}/repos"], |
| createOrUpdateFile: ["PUT /repos/{owner}/{repo}/contents/{path}", {}, { |
| renamed: ["repos", "createOrUpdateFileContents"] |
| }], |
| createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], |
| createPagesSite: ["POST /repos/{owner}/{repo}/pages", { |
| mediaType: { |
| previews: ["switcheroo"] |
| } |
| }], |
| createRelease: ["POST /repos/{owner}/{repo}/releases"], |
| createStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}", {}, { |
| renamed: ["repos", "createCommitStatus"] |
| }], |
| createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { |
| mediaType: { |
| previews: ["baptiste"] |
| } |
| }], |
| createWebhook: ["POST /repos/{owner}/{repo}/hooks"], |
| declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], |
| delete: ["DELETE /repos/{owner}/{repo}"], |
| deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], |
| deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], |
| deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], |
| deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], |
| deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { |
| mediaType: { |
| previews: ["zzzax"] |
| } |
| }], |
| deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], |
| deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], |
| deleteDownload: ["DELETE /repos/{owner}/{repo}/downloads/{download_id}"], |
| deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], |
| deleteHook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}", {}, { |
| renamed: ["repos", "deleteWebhook"] |
| }], |
| deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], |
| deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { |
| mediaType: { |
| previews: ["switcheroo"] |
| } |
| }], |
| deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], |
| deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], |
| deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], |
| deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], |
| disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { |
| mediaType: { |
| previews: ["london"] |
| } |
| }], |
| disablePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { |
| mediaType: { |
| previews: ["switcheroo"] |
| } |
| }, { |
| renamed: ["repos", "deletePagesSite"] |
| }], |
| disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { |
| mediaType: { |
| previews: ["dorian"] |
| } |
| }], |
| downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], |
| enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { |
| mediaType: { |
| previews: ["london"] |
| } |
| }], |
| enablePagesSite: ["POST /repos/{owner}/{repo}/pages", { |
| mediaType: { |
| previews: ["switcheroo"] |
| } |
| }, { |
| renamed: ["repos", "createPagesSite"] |
| }], |
| enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { |
| mediaType: { |
| previews: ["dorian"] |
| } |
| }], |
| get: ["GET /repos/{owner}/{repo}"], |
| getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], |
| getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], |
| getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], |
| getAllTopics: ["GET /repos/{owner}/{repo}/topics", { |
| mediaType: { |
| previews: ["mercy"] |
| } |
| }], |
| getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], |
| getArchiveLink: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}", {}, { |
| renamed: ["repos", "downloadArchive"] |
| }], |
| getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], |
| getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], |
| getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], |
| getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], |
| getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], |
| getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], |
| getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], |
| getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], |
| getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], |
| getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { |
| mediaType: { |
| previews: ["zzzax"] |
| } |
| }], |
| getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], |
| getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], |
| getContents: ["GET /repos/{owner}/{repo}/contents/{path}", {}, { |
| renamed: ["repos", "getContent"] |
| }], |
| getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], |
| getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], |
| getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], |
| getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], |
| getDownload: ["GET /repos/{owner}/{repo}/downloads/{download_id}"], |
| getHook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}", {}, { |
| renamed: ["repos", "getWebhook"] |
| }], |
| getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], |
| getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], |
| getPages: ["GET /repos/{owner}/{repo}/pages"], |
| getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], |
| getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], |
| getProtectedBranchAdminEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { |
| renamed: ["repos", "getAdminBranchProtection"] |
| }], |
| getProtectedBranchPullRequestReviewEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { |
| renamed: ["repos", "getPullRequestReviewProtection"] |
| }], |
| getProtectedBranchRequiredSignatures: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { |
| mediaType: { |
| previews: ["zzzax"] |
| } |
| }, { |
| renamed: ["repos", "getCommitSignatureProtection"] |
| }], |
| getProtectedBranchRequiredStatusChecks: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { |
| renamed: ["repos", "getStatusChecksProtection"] |
| }], |
| getProtectedBranchRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { |
| renamed: ["repos", "getAccessRestrictions"] |
| }], |
| getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], |
| getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], |
| getReadme: ["GET /repos/{owner}/{repo}/readme"], |
| getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], |
| getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], |
| getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], |
| getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], |
| getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], |
| getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], |
| getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], |
| getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], |
| getViews: ["GET /repos/{owner}/{repo}/traffic/views"], |
| getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], |
| list: ["GET /user/repos", {}, { |
| renamed: ["repos", "listForAuthenticatedUser"] |
| }], |
| listAssetsForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets", {}, { |
| renamed: ["repos", "listReleaseAssets"] |
| }], |
| listBranches: ["GET /repos/{owner}/{repo}/branches"], |
| listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { |
| mediaType: { |
| previews: ["groot"] |
| } |
| }], |
| listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], |
| listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], |
| listCommitComments: ["GET /repos/{owner}/{repo}/comments", {}, { |
| renamed: ["repos", "listCommitCommentsForRepo"] |
| }], |
| listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], |
| listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], |
| listCommits: ["GET /repos/{owner}/{repo}/commits"], |
| listContributors: ["GET /repos/{owner}/{repo}/contributors"], |
| listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], |
| listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], |
| listDeployments: ["GET /repos/{owner}/{repo}/deployments"], |
| listDownloads: ["GET /repos/{owner}/{repo}/downloads"], |
| listForAuthenticatedUser: ["GET /user/repos"], |
| listForOrg: ["GET /orgs/{org}/repos"], |
| listForUser: ["GET /users/{username}/repos"], |
| listForks: ["GET /repos/{owner}/{repo}/forks"], |
| listHooks: ["GET /repos/{owner}/{repo}/hooks", {}, { |
| renamed: ["repos", "listWebhooks"] |
| }], |
| listInvitations: ["GET /repos/{owner}/{repo}/invitations"], |
| listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], |
| listLanguages: ["GET /repos/{owner}/{repo}/languages"], |
| listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], |
| listProtectedBranchRequiredStatusChecksContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| renamed: ["repos", "getAllStatusCheckContexts"] |
| }], |
| listPublic: ["GET /repositories"], |
| listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { |
| mediaType: { |
| previews: ["groot"] |
| } |
| }], |
| listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], |
| listReleases: ["GET /repos/{owner}/{repo}/releases"], |
| listStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses", {}, { |
| renamed: ["repos", "listCommitStatusesForRef"] |
| }], |
| listTags: ["GET /repos/{owner}/{repo}/tags"], |
| listTeams: ["GET /repos/{owner}/{repo}/teams"], |
| listTopics: ["GET /repos/{owner}/{repo}/topics", { |
| mediaType: { |
| previews: ["mercy"] |
| } |
| }, { |
| renamed: ["repos", "getAllTopics"] |
| }], |
| listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], |
| merge: ["POST /repos/{owner}/{repo}/merges"], |
| pingHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings", {}, { |
| renamed: ["repos", "pingWebhook"] |
| }], |
| pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], |
| removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { |
| mapToData: "apps" |
| }], |
| removeBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection", {}, { |
| renamed: ["repos", "deleteBranchProtection"] |
| }], |
| removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], |
| removeDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}", {}, { |
| renamed: ["repos", "deleteDeployKey"] |
| }], |
| removeProtectedBranchAdminEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { |
| renamed: ["repos", "deleteAdminBranchProtection"] |
| }], |
| removeProtectedBranchAppRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { |
| mapToData: "apps", |
| renamed: ["repos", "removeAppAccessRestrictions"] |
| }], |
| removeProtectedBranchPullRequestReviewEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { |
| renamed: ["repos", "deletePullRequestReviewProtection"] |
| }], |
| removeProtectedBranchRequiredSignatures: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { |
| mediaType: { |
| previews: ["zzzax"] |
| } |
| }, { |
| renamed: ["repos", "deleteCommitSignatureProtection"] |
| }], |
| removeProtectedBranchRequiredStatusChecks: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { |
| renamed: ["repos", "removeStatusChecksProtection"] |
| }], |
| removeProtectedBranchRequiredStatusChecksContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| mapToData: "contexts", |
| renamed: ["repos", "removeStatusCheckContexts"] |
| }], |
| removeProtectedBranchRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { |
| renamed: ["repos", "deleteAccessRestrictions"] |
| }], |
| removeProtectedBranchTeamRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { |
| mapToData: "teams", |
| renamed: ["repos", "removeTeamAccessRestrictions"] |
| }], |
| removeProtectedBranchUserRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { |
| mapToData: "users", |
| renamed: ["repos", "removeUserAccessRestrictions"] |
| }], |
| removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| mapToData: "contexts" |
| }], |
| removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], |
| removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { |
| mapToData: "teams" |
| }], |
| removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { |
| mapToData: "users" |
| }], |
| replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { |
| mediaType: { |
| previews: ["mercy"] |
| } |
| }], |
| replaceProtectedBranchAppRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { |
| mapToData: "apps", |
| renamed: ["repos", "setAppAccessRestrictions"] |
| }], |
| replaceProtectedBranchRequiredStatusChecksContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| mapToData: "contexts", |
| renamed: ["repos", "setStatusCheckContexts"] |
| }], |
| replaceProtectedBranchTeamRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { |
| mapToData: "teams", |
| renamed: ["repos", "setTeamAccessRestrictions"] |
| }], |
| replaceProtectedBranchUserRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { |
| mapToData: "users", |
| renamed: ["repos", "setUserAccessRestrictions"] |
| }], |
| replaceTopics: ["PUT /repos/{owner}/{repo}/topics", { |
| mediaType: { |
| previews: ["mercy"] |
| } |
| }, { |
| renamed: ["repos", "replaceAllTopics"] |
| }], |
| requestPageBuild: ["POST /repos/{owner}/{repo}/pages/builds", {}, { |
| renamed: ["repos", "requestPagesBuild"] |
| }], |
| requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], |
| retrieveCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", {}, { |
| renamed: ["repos", "getCommunityProfileMetrics"] |
| }], |
| setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], |
| setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { |
| mapToData: "apps" |
| }], |
| setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { |
| mapToData: "contexts" |
| }], |
| setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { |
| mapToData: "teams" |
| }], |
| setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { |
| mapToData: "users" |
| }], |
| testPushHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests", {}, { |
| renamed: ["repos", "testPushWebhook"] |
| }], |
| testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], |
| transfer: ["POST /repos/{owner}/{repo}/transfer"], |
| update: ["PATCH /repos/{owner}/{repo}"], |
| updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], |
| updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], |
| updateHook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}", {}, { |
| renamed: ["repos", "updateWebhook"] |
| }], |
| updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], |
| updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], |
| updateProtectedBranchPullRequestReviewEnforcement: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { |
| renamed: ["repos", "updatePullRequestReviewProtection"] |
| }], |
| updateProtectedBranchRequiredStatusChecks: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { |
| renamed: ["repos", "updateStatusChecksProtection"] |
| }], |
| updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], |
| updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], |
| updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], |
| updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], |
| updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], |
| uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { |
| baseUrl: "https://uploads.github.com" |
| }] |
| }, |
| search: { |
| code: ["GET /search/code"], |
| commits: ["GET /search/commits", { |
| mediaType: { |
| previews: ["cloak"] |
| } |
| }], |
| issuesAndPullRequests: ["GET /search/issues"], |
| labels: ["GET /search/labels"], |
| repos: ["GET /search/repositories"], |
| topics: ["GET /search/topics"], |
| users: ["GET /search/users"] |
| }, |
| teams: { |
| addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], |
| addOrUpdateMembershipInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { |
| renamed: ["teams", "addOrUpdateMembershipForUserInOrg"] |
| }], |
| addOrUpdateProjectInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }, { |
| renamed: ["teams", "addOrUpdateProjectPermissionsInOrg"] |
| }], |
| addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| addOrUpdateRepoInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { |
| renamed: ["teams", "addOrUpdateRepoPermissionsInOrg"] |
| }], |
| addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], |
| checkManagesRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { |
| renamed: ["teams", "checkPermissionsForRepoInOrg"] |
| }], |
| checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], |
| create: ["POST /orgs/{org}/teams"], |
| createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], |
| createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], |
| deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], |
| deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], |
| deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], |
| getByName: ["GET /orgs/{org}/teams/{team_slug}"], |
| getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], |
| getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], |
| getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], |
| getMembershipInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { |
| renamed: ["teams", "getMembershipForUserInOrg"] |
| }], |
| list: ["GET /orgs/{org}/teams"], |
| listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], |
| listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], |
| listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], |
| listForAuthenticatedUser: ["GET /user/teams"], |
| listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], |
| listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], |
| listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }], |
| listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], |
| removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], |
| removeMembershipInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { |
| renamed: ["teams", "removeMembershipForUserInOrg"] |
| }], |
| removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], |
| removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], |
| reviewProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { |
| mediaType: { |
| previews: ["inertia"] |
| } |
| }, { |
| renamed: ["teams", "checkPermissionsForProjectInOrg"] |
| }], |
| updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], |
| updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], |
| updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] |
| }, |
| users: { |
| addEmailForAuthenticated: ["POST /user/emails"], |
| addEmails: ["POST /user/emails", {}, { |
| renamed: ["users", "addEmailsForAuthenticated"] |
| }], |
| block: ["PUT /user/blocks/{username}"], |
| checkBlocked: ["GET /user/blocks/{username}"], |
| checkFollowing: ["GET /user/following/{username}", {}, { |
| renamed: ["users", "checkPersonIsFollowedByAuthenticated"] |
| }], |
| checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], |
| checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], |
| createGpgKey: ["POST /user/gpg_keys", {}, { |
| renamed: ["users", "createGpgKeyForAuthenticated"] |
| }], |
| createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], |
| createPublicKey: ["POST /user/keys", {}, { |
| renamed: ["users", "createPublicSshKeyForAuthenticated"] |
| }], |
| createPublicSshKeyForAuthenticated: ["POST /user/keys"], |
| deleteEmailForAuthenticated: ["DELETE /user/emails"], |
| deleteEmails: ["DELETE /user/emails", {}, { |
| renamed: ["users", "deleteEmailsForAuthenticated"] |
| }], |
| deleteGpgKey: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { |
| renamed: ["users", "deleteGpgKeyForAuthenticated"] |
| }], |
| deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], |
| deletePublicKey: ["DELETE /user/keys/{key_id}", {}, { |
| renamed: ["users", "deletePublicSshKeyForAuthenticated"] |
| }], |
| deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], |
| follow: ["PUT /user/following/{username}"], |
| getAuthenticated: ["GET /user"], |
| getByUsername: ["GET /users/{username}"], |
| getContextForUser: ["GET /users/{username}/hovercard"], |
| getGpgKey: ["GET /user/gpg_keys/{gpg_key_id}", {}, { |
| renamed: ["users", "getGpgKeyForAuthenticated"] |
| }], |
| getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], |
| getPublicKey: ["GET /user/keys/{key_id}", {}, { |
| renamed: ["users", "getPublicSshKeyForAuthenticated"] |
| }], |
| getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], |
| list: ["GET /users"], |
| listBlocked: ["GET /user/blocks", {}, { |
| renamed: ["users", "listBlockedByAuthenticated"] |
| }], |
| listBlockedByAuthenticated: ["GET /user/blocks"], |
| listEmails: ["GET /user/emails", {}, { |
| renamed: ["users", "listEmailsForAuthenticated"] |
| }], |
| listEmailsForAuthenticated: ["GET /user/emails"], |
| listFollowedByAuthenticated: ["GET /user/following"], |
| listFollowersForAuthenticatedUser: ["GET /user/followers"], |
| listFollowersForUser: ["GET /users/{username}/followers"], |
| listFollowingForAuthenticatedUser: ["GET /user/following", {}, { |
| renamed: ["users", "listFollowedByAuthenticated"] |
| }], |
| listFollowingForUser: ["GET /users/{username}/following"], |
| listGpgKeys: ["GET /user/gpg_keys", {}, { |
| renamed: ["users", "listGpgKeysForAuthenticated"] |
| }], |
| listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], |
| listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], |
| listPublicEmails: ["GET /user/public_emails", {}, { |
| renamed: ["users", "listPublicEmailsForAuthenticatedUser"] |
| }], |
| listPublicEmailsForAuthenticated: ["GET /user/public_emails"], |
| listPublicKeys: ["GET /user/keys", {}, { |
| renamed: ["users", "listPublicSshKeysForAuthenticated"] |
| }], |
| listPublicKeysForUser: ["GET /users/{username}/keys"], |
| listPublicSshKeysForAuthenticated: ["GET /user/keys"], |
| setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], |
| togglePrimaryEmailVisibility: ["PATCH /user/email/visibility", {}, { |
| renamed: ["users", "setPrimaryEmailVisibilityForAuthenticated"] |
| }], |
| unblock: ["DELETE /user/blocks/{username}"], |
| unfollow: ["DELETE /user/following/{username}"], |
| updateAuthenticated: ["PATCH /user"] |
| } |
| }; |
| |
| const VERSION = "3.17.0"; |
| |
// Builds the octokit.<scope>.<methodName>() tree from the route map.
// Each map entry is [route, defaults?, decorations?]: decorated endpoints
// are wrapped via decorate(); plain ones become request.defaults() bindings.
function endpointsToMethods(octokit, endpointsMap) {
  const api = {};

  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      // A route string is "<HTTP verb> <url template>".
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);

      // Create the scope container lazily, only for scopes that actually
      // contribute at least one method.
      const scopeApi = api[scope] || (api[scope] = {});

      scopeApi[methodName] = decorations
        ? decorate(octokit, scope, methodName, endpointDefaults, decorations)
        : octokit.request.defaults(endpointDefaults);
    }
  }

  return api;
}
| |
// Wraps an endpoint method so legacy decorations keep working:
//  - mapToData: moves the named parameter into the request body (`data`)
//  - renamed: logs a deprecation pointing at the new scope/method
//  - deprecated: logs an arbitrary deprecation message
//  - renamedParameters: logs per-parameter deprecations and remaps values
// The returned function also carries `.defaults()` / `.endpoint` from the
// underlying request via Object.assign.
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);

  const withDecorations = (...args) => {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args);

    // `.mapToData` short-circuits: no rename/deprecation warnings are
    // emitted on this path (matches the original behavior).
    if (decorations.mapToData) {
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    }

    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }

    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }

    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const remapped = requestWithDefaults.endpoint.merge(...args);

      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        /* istanbul ignore else */
        if (name in remapped) {
          octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);

          if (!(alias in remapped)) {
            remapped[alias] = remapped[name];
          }

          delete remapped[name];
        }
      }

      return requestWithDefaults(remapped);
    }

    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  };

  return Object.assign(withDecorations, requestWithDefaults);
}
| |
| /** |
| * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary |
| * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is |
| * done, we will remove the registerEndpoints methods and return the methods |
| * directly as with the other plugins. At that point we will also remove the |
| * legacy workarounds and deprecations. |
| * |
| * See the plan at |
| * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 |
| */ |
| |
/**
 * Octokit plugin entry point: returns all REST endpoint methods
 * (octokit.repos.*, octokit.teams.*, ...) generated from the route map.
 */
function restEndpointMethods(octokit) {
  const methods = endpointsToMethods(octokit, Endpoints);
  return methods;
}
restEndpointMethods.VERSION = VERSION;
| |
| exports.restEndpointMethods = restEndpointMethods; |
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 866: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| var shebangRegex = __webpack_require__(816); |
| |
| module.exports = function (str) { |
| var match = str.match(shebangRegex); |
| |
| if (!match) { |
| return null; |
| } |
| |
| var arr = match[0].replace(/#! ?/, '').split(' '); |
| var bin = arr[0].split('/').pop(); |
| var arg = arr[1]; |
| |
| return (bin === 'env' ? |
| arg : |
| bin + (arg ? ' ' + arg : '') |
| ); |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 881: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| const isWin = process.platform === 'win32'; |
| |
// Builds the ENOENT error that child_process itself would have produced
// had the executable lookup failed at spawn time.
function notFoundError(original, syscall) {
    const err = new Error(`${syscall} ${original.command} ENOENT`);
    err.code = 'ENOENT';
    err.errno = 'ENOENT';
    err.syscall = `${syscall} ${original.command}`;
    err.path = original.command;
    err.spawnargs = original.args;
    return err;
}
| |
// On Windows, spawning a missing command may surface as a normal "exit"
// with code 1 instead of an "error" event. Patch `cp.emit` so that case
// is translated back into the ENOENT error Node emits on POSIX.
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
function hookChildProcess(cp, parsed) {
    if (!isWin) {
        return;
    }

    const originalEmit = cp.emit;

    cp.emit = function (name, ...rest) {
        // Only "exit" events need inspection; verifyENOENT decides whether
        // this exit really was a failed executable lookup.
        const err = name === 'exit' ? verifyENOENT(rest[0], parsed, 'spawn') : null;

        if (err) {
            return originalEmit.call(cp, 'error', err);
        }

        return originalEmit.call(cp, name, ...rest);
    };
}
| |
// Detects the Windows "exit code 1 because the file was never found" case
// for async spawn; returns the synthesized ENOENT error, or null.
function verifyENOENT(status, parsed) {
    const missing = isWin && status === 1 && !parsed.file;
    return missing ? notFoundError(parsed.original, 'spawn') : null;
}
| |
// Same check as verifyENOENT, but labels the error with the "spawnSync"
// syscall for the synchronous API.
function verifyENOENTSync(status, parsed) {
    const missing = isWin && status === 1 && !parsed.file;
    return missing ? notFoundError(parsed.original, 'spawnSync') : null;
}
| |
// Public surface consumed by cross-spawn: the emit hook plus the ENOENT
// detection/synthesis helpers for both async and sync spawns.
module.exports = {
    hookChildProcess,
    verifyENOENT,
    verifyENOENTSync,
    notFoundError,
};
| |
| |
| /***/ }), |
| |
| /***/ 898: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| |
| Object.defineProperty(exports, '__esModule', { value: true }); |
| |
| var request = __webpack_require__(753); |
| var universalUserAgent = __webpack_require__(796); |
| |
| const VERSION = "4.5.1"; |
| |
// Error raised when a GraphQL response carries an `errors` array. The
// message is taken from the first error; every top-level field of the
// response data (errors, data, ...) is copied onto the instance.
class GraphqlError extends Error {
  constructor(request, response) {
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    this.name = "GraphqlError";
    this.request = request;

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
| |
// Keys that belong to the HTTP request itself; every other key passed to
// graphql() is treated as a GraphQL variable.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
// Sends a GraphQL query. Supports both call shapes:
//   graphql(request, "query ...", { ...options, ...variables })
//   graphql(request, { query, ...options, ...variables })
// Resolves with response.data.data, or rejects with a GraphqlError when
// the response carries an `errors` array.
function graphql(request, query, options) {
  const combined = typeof query === "string" ? Object.assign({
    query
  }, options) : query;

  const requestOptions = {};

  for (const key of Object.keys(combined)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = combined[key];
      continue;
    }

    // Everything else is collected under `variables` (created lazily so
    // a variable-free call sends no `variables` key at all).
    if (!requestOptions.variables) {
      requestOptions.variables = {};
    }

    requestOptions.variables[key] = combined[key];
  }

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      throw new GraphqlError(requestOptions, {
        data: response.data
      });
    }

    return response.data.data;
  });
}
| |
// Returns a graphql function bound to `newDefaults`. The result also
// exposes `.defaults()` for further chaining and an `.endpoint`.
function withDefaults(request$1, newDefaults) {
  const newRequest = request$1.defaults(newDefaults);

  const newApi = (query, options) => graphql(newRequest, query, options);

  newApi.defaults = withDefaults.bind(null, newRequest);
  // NOTE(review): upstream attaches the module-level request's endpoint
  // here (not newRequest.endpoint); preserved as-is.
  newApi.endpoint = request.request.endpoint;
  return newApi;
}
| |
// Default graphql client: POSTs to /graphql with a version-stamped
// user-agent header.
const graphql$1 = withDefaults(request.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});
// Builds a graphql client on top of a caller-supplied request function
// (e.g. one pre-authenticated or pointed at a GHES baseUrl).
function withCustomRequest(customRequest) {
  return withDefaults(customRequest, {
    method: "POST",
    url: "/graphql"
  });
}

exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
| //# sourceMappingURL=index.js.map |
| |
| |
| /***/ }), |
| |
| /***/ 948: |
| /***/ (function(module) { |
| |
| "use strict"; |
| |
| |
| /** |
| * Tries to execute a function and discards any error that occurs. |
| * @param {Function} fn - Function that might or might not throw an error. |
| * @returns {?*} Return-value of the function when no error occurred. |
| */ |
| module.exports = function(fn) { |
| |
| try { return fn() } catch (e) {} |
| |
| } |
| |
| /***/ }), |
| |
| /***/ 950: |
| /***/ (function(__unusedmodule, exports, __webpack_require__) { |
| |
| "use strict"; |
| |
| Object.defineProperty(exports, "__esModule", { value: true }); |
| const url = __webpack_require__(835); |
// Resolves the proxy URL configured for `reqUrl` via the conventional
// environment variables (https_proxy / http_proxy, lower- or upper-case).
// Returns undefined when the host is excluded by no_proxy or when no
// proxy variable is set.
function getProxyUrl(reqUrl) {
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
    }
    return proxyUrl;
}
| exports.getProxyUrl = getProxyUrl; |
// Decides whether `reqUrl` should skip the proxy, based on the no_proxy /
// NO_PROXY environment variable (comma-separated host[:port] entries,
// compared case-insensitively). Returns true when the request host
// matches an entry.
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port: an explicit port wins, otherwise the
    // protocol default (80/443). Other protocols leave it undefined.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate spellings of the request host: bare, and with the port.
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Match against each non-empty, trimmed, upper-cased no_proxy entry.
    const entries = noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x);
    return entries.some(entry => upperReqHosts.includes(entry));
}
| exports.checkBypass = checkBypass; |
| |
| |
| /***/ }), |
| |
| /***/ 955: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const path = __webpack_require__(622); |
| const childProcess = __webpack_require__(129); |
| const crossSpawn = __webpack_require__(20); |
| const stripEof = __webpack_require__(768); |
| const npmRunPath = __webpack_require__(621); |
| const isStream = __webpack_require__(323); |
| const _getStream = __webpack_require__(145); |
| const pFinally = __webpack_require__(697); |
| const onExit = __webpack_require__(260); |
| const errname = __webpack_require__(427); |
| const stdio = __webpack_require__(168); |
| |
| const TEN_MEGABYTES = 1000 * 1000 * 10; |
| |
// Normalizes (cmd, args, opts) into spawn parameters:
//  - merges the caller env over process.env (unless extendEnv is false)
//  - routes through cross-spawn's parser, or builds a raw "parsed" object
//    when handleShell() marked the call with __winShell
//  - applies execa's option defaults and the npm-run-path local-bin env
// Returns {cmd, args, opts, parsed} ready for childProcess.spawn.
function handleArgs(cmd, args, opts) {
  let parsed;

  opts = Object.assign({
    extendEnv: true,
    env: {}
  }, opts);

  if (opts.extendEnv) {
    opts.env = Object.assign({}, process.env, opts.env);
  }

  if (opts.__winShell === true) {
    // Set by handleShell() on Windows: bypass cross-spawn and run the
    // command line verbatim through cmd.exe.
    delete opts.__winShell;
    parsed = {
      command: cmd,
      args,
      options: opts,
      file: cmd,
      original: {
        cmd,
        args
      }
    };
  } else {
    parsed = crossSpawn._parse(cmd, args, opts);
  }

  // Layer execa's defaults under whatever cross-spawn produced.
  opts = Object.assign({
    maxBuffer: TEN_MEGABYTES,
    buffer: true,
    stripEof: true,
    preferLocal: true,
    localDir: parsed.options.cwd || process.cwd(),
    encoding: 'utf8',
    reject: true,
    cleanup: true
  }, parsed.options);

  opts.stdio = stdio(opts);

  if (opts.preferLocal) {
    // Prepend the local node_modules/.bin (relative to localDir) to PATH.
    opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
  }

  if (opts.detached) {
    // #115: a detached child must be able to outlive the parent, so do
    // not install the exit-cleanup kill handler.
    opts.cleanup = false;
  }

  if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
    // #116: /q turns off cmd.exe's command echoing.
    parsed.args.unshift('/q');
  }

  return {
    cmd: parsed.command,
    args: parsed.args,
    opts,
    parsed
  };
}
| |
// Feeds `input` to the child's stdin: streams are piped; strings/buffers
// are written out and stdin is closed. null/undefined means "no input".
function handleInput(spawned, input) {
  if (input === null || input === undefined) {
    return;
  }

  if (isStream(input)) {
    input.pipe(spawned.stdin);
    return;
  }

  spawned.stdin.end(input);
}
| |
// Applies the `stripEof` option to a captured stdout/stderr value.
// Falsy values (e.g. null when the stream was not captured) pass
// through untouched.
function handleOutput(opts, val) {
  return val && opts.stripEof ? stripEof(val) : val;
}
| |
// Runs `cmd` through a shell: /bin/sh -c on POSIX, cmd.exe /s /c (with
// verbatim quoting) on Windows. An explicit `opts.shell` overrides the
// shell binary; the option itself is not forwarded to the child.
function handleShell(fn, cmd, opts) {
  // Copy so the caller's options object is never mutated.
  opts = Object.assign({}, opts);

  let file = '/bin/sh';
  let args = ['-c', cmd];

  if (process.platform === 'win32') {
    opts.__winShell = true;
    file = process.env.comspec || 'cmd.exe';
    args = ['/s', '/c', `"${cmd}"`];
    opts.windowsVerbatimArguments = true;
  }

  if (opts.shell) {
    file = opts.shell;
    delete opts.shell;
  }

  return fn(file, args, opts);
}
| |
// Collects one of the child's output streams according to the capture
// options. Returns null when the stream does not exist (e.g. stdio
// 'ignore'/'inherit'); otherwise a promise for the captured value
// (or undefined when `buffer` is false and output is not collected).
function getStream(process, stream, {encoding, buffer, maxBuffer}) {
  const source = process[stream];

  if (!source) {
    return null;
  }

  let captured;

  if (!buffer) {
    // Not capturing output: just wait for the stream to finish.
    // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
    captured = new Promise((resolve, reject) => {
      source.once('end', resolve).once('error', reject);
    });
  } else {
    captured = encoding
      ? _getStream(source, {encoding, maxBuffer})
      : _getStream.buffer(source, {maxBuffer});
  }

  // Tag failures with the stream name so error reporting can say which
  // stream overflowed or errored.
  return captured.catch(err => {
    err.stream = stream;
    err.message = `${stream} ${err.message}`;
    throw err;
  });
}
| |
// Normalizes a failed child-process result into an Error carrying the
// execa metadata (stdout/stderr/code/signal/cmd/timedOut). Reuses
// `result.error` when the spawn itself already produced one.
function makeError(result, options) {
  const {stdout, stderr, code, signal} = result;
  const {parsed, joinedCmd} = options;
  const timedOut = options.timedOut || false;

  let err = result.error;

  if (!err) {
    // Build a "Command failed" message including any captured output,
    // skipping streams that were inherited by the parent.
    let output = '';

    if (Array.isArray(parsed.opts.stdio)) {
      if (parsed.opts.stdio[2] !== 'inherit') {
        // NOTE(review): `output` is always empty at this point, so the
        // `\n` branch is always taken; expression preserved as written.
        output += output.length > 0 ? stderr : `\n${stderr}`;
      }

      if (parsed.opts.stdio[1] !== 'inherit') {
        output += `\n${stdout}`;
      }
    } else if (parsed.opts.stdio !== 'inherit') {
      output = `\n${stderr}${stdout}`;
    }

    err = new Error(`Command failed: ${joinedCmd}${output}`);
    // Negative codes are libuv error numbers; map them to their names.
    err.code = code < 0 ? errname(code) : code;
  }

  Object.assign(err, {
    stdout,
    stderr,
    failed: true,
    signal: signal || null,
    cmd: joinedCmd,
    timedOut
  });

  return err;
}
| |
// Renders "cmd arg1 arg2 ..." for error messages; `args` is optional.
function joinCmd(cmd, args) {
  if (Array.isArray(args) && args.length > 0) {
    return `${cmd} ${args.join(' ')}`;
  }

  return cmd;
}
| |
// Main entry point: spawns `cmd` via cross-spawn, wires up input/output
// capture, timeout and parent-exit cleanup, and returns the ChildProcess
// augmented with a then/catch interface that resolves to
// {stdout, stderr, code, failed, killed, signal, cmd, timedOut}.
module.exports = (cmd, args, opts) => {
  const parsed = handleArgs(cmd, args, opts);
  const {encoding, buffer, maxBuffer} = parsed.opts;
  const joinedCmd = joinCmd(cmd, args);

  let spawned;
  try {
    spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
  } catch (err) {
    // Surface synchronous spawn failures as a rejected promise.
    return Promise.reject(err);
  }

  // Kill the child if the parent exits first (unless cleanup is off).
  let removeExitHandler;
  if (parsed.opts.cleanup) {
    removeExitHandler = onExit(() => {
      spawned.kill();
    });
  }

  let timeoutId = null;
  let timedOut = false;

  // Clears the timeout and detaches the exit handler; safe to call twice.
  const cleanup = () => {
    if (timeoutId) {
      clearTimeout(timeoutId);
      timeoutId = null;
    }

    if (removeExitHandler) {
      removeExitHandler();
    }
  };

  if (parsed.opts.timeout > 0) {
    timeoutId = setTimeout(() => {
      timeoutId = null;
      timedOut = true;
      spawned.kill(parsed.opts.killSignal);
    }, parsed.opts.timeout);
  }

  // Resolves (never rejects) with {code, signal} on exit, or {error} on
  // spawn/stdin failure.
  const processDone = new Promise(resolve => {
    spawned.on('exit', (code, signal) => {
      cleanup();
      resolve({code, signal});
    });

    spawned.on('error', err => {
      cleanup();
      resolve({error: err});
    });

    if (spawned.stdin) {
      spawned.stdin.on('error', err => {
        cleanup();
        resolve({error: err});
      });
    }
  });

  // Tears down the output streams once the result promise settles.
  function destroy() {
    if (spawned.stdout) {
      spawned.stdout.destroy();
    }

    if (spawned.stderr) {
      spawned.stderr.destroy();
    }
  }

  // Awaits exit plus both output streams, then builds the result object
  // (or an execa error via makeError). Streams are destroyed either way.
  const handlePromise = () => pFinally(Promise.all([
    processDone,
    getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
    getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
  ]).then(arr => {
    const result = arr[0];
    result.stdout = arr[1];
    result.stderr = arr[2];

    if (result.error || result.code !== 0 || result.signal !== null) {
      const err = makeError(result, {
        joinedCmd,
        parsed,
        timedOut
      });

      // TODO: missing some timeout logic for killed
      // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
      // err.killed = spawned.killed || killed;
      err.killed = err.killed || spawned.killed;

      // With reject:false, the error object is returned as the result
      // instead of being thrown.
      if (!parsed.opts.reject) {
        return err;
      }

      throw err;
    }

    return {
      stdout: handleOutput(parsed.opts, result.stdout),
      stderr: handleOutput(parsed.opts, result.stderr),
      code: 0,
      failed: false,
      killed: false,
      signal: null,
      cmd: joinedCmd,
      timedOut: false
    };
  }), destroy);

  // Translate Windows "missing binary exits with code 1" into ENOENT.
  crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);

  handleInput(spawned, parsed.opts.input);

  // Make the ChildProcess awaitable; the result pipeline is created
  // lazily on first then/catch.
  spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
  spawned.catch = onrejected => handlePromise().catch(onrejected);

  return spawned;
};
| |
// TODO: set `stderr: 'ignore'` when that option is implemented
module.exports.stdout = (...args) => module.exports(...args).then(result => result.stdout);

// TODO: set `stdout: 'ignore'` when that option is implemented
module.exports.stderr = (...args) => module.exports(...args).then(result => result.stderr);

// Run `cmd` through the platform shell (delegates option handling to handleShell).
module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts);
| |
// Synchronous counterpart of the main export, built on spawnSync. Returns
// the same result shape on success; on failure it throws (or, with
// `reject: false`, returns) the error produced by makeError.
module.exports.sync = (cmd, args, opts) => {
	const parsed = handleArgs(cmd, args, opts);
	const joinedCmd = joinCmd(cmd, args);

	// Streams cannot be consumed synchronously.
	if (isStream(parsed.opts.input)) {
		throw new TypeError('The `input` option cannot be a stream in sync mode');
	}

	const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
	result.code = result.status;

	const succeeded = !result.error && result.status === 0 && result.signal === null;

	if (succeeded) {
		return {
			stdout: handleOutput(parsed.opts, result.stdout),
			stderr: handleOutput(parsed.opts, result.stderr),
			code: 0,
			failed: false,
			signal: null,
			cmd: joinedCmd,
			timedOut: false
		};
	}

	const err = makeError(result, {joinedCmd, parsed});

	if (parsed.opts.reject) {
		throw err;
	}

	return err;
};
| |
// Synchronous variant of `.shell`: run `cmd` through the platform shell using the sync API.
module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
| |
| |
| /***/ }), |
| |
| /***/ 966: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
| "use strict"; |
| |
| const {PassThrough} = __webpack_require__(794); |
| |
| module.exports = options => { |
| options = Object.assign({}, options); |
| |
| const {array} = options; |
| let {encoding} = options; |
| const buffer = encoding === 'buffer'; |
| let objectMode = false; |
| |
| if (array) { |
| objectMode = !(encoding || buffer); |
| } else { |
| encoding = encoding || 'utf8'; |
| } |
| |
| if (buffer) { |
| encoding = null; |
| } |
| |
| let len = 0; |
| const ret = []; |
| const stream = new PassThrough({objectMode}); |
| |
| if (encoding) { |
| stream.setEncoding(encoding); |
| } |
| |
| stream.on('data', chunk => { |
| ret.push(chunk); |
| |
| if (objectMode) { |
| len = ret.length; |
| } else { |
| len += chunk.length; |
| } |
| }); |
| |
| stream.getBufferedValue = () => { |
| if (array) { |
| return ret; |
| } |
| |
| return buffer ? Buffer.concat(ret, len) : ret.join(''); |
| }; |
| |
| stream.getBufferedLength = () => len; |
| |
| return stream; |
| }; |
| |
| |
| /***/ }), |
| |
| /***/ 969: |
| /***/ (function(module, __unusedexports, __webpack_require__) { |
| |
// Module 11 is `wrappy`; presumably it preserves properties attached to the
// wrapped function (`called`, `value`, `onceError`) — TODO confirm against module 11.
var wrappy = __webpack_require__(11)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)
| |
// Opt-in prototype extension: invoking `once.proto()` adds `.once()` and
// `.onceStrict()` helpers to Function.prototype. The installer is itself
// wrapped in `once`, so repeated calls install the properties only one time.
// NOTE(review): extends a native prototype — deliberate here, and only when
// the consumer explicitly calls `once.proto()`.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
| |
// Wrap `fn` so it runs at most once; subsequent calls return the cached
// first result. Exposes `wrapped.called` and `wrapped.value` for callers.
function once (fn) {
  var wrapped = function () {
    if (!wrapped.called) {
      wrapped.called = true
      wrapped.value = fn.apply(this, arguments)
    }
    return wrapped.value
  }
  wrapped.called = false
  return wrapped
}
| |
// Like `once`, but a second invocation throws (message includes the wrapped
// function's name) instead of silently returning the cached value.
function onceStrict (fn) {
  var name = fn.name || 'Function wrapped with `once`'
  var wrapped = function () {
    if (wrapped.called) throw new Error(wrapped.onceError)
    wrapped.called = true
    wrapped.value = fn.apply(this, arguments)
    return wrapped.value
  }
  wrapped.onceError = name + " shouldn't be called more than once"
  wrapped.called = false
  return wrapped
}
| |
| |
| /***/ }) |
| |
| /******/ }); |
| |