Permalink
Cannot retrieve contributors at this time
17052 lines (14926 sloc)
521 KB
Name already in use
A tag already exists with the provided branch name. Many Git commands accept both tag and branch names, so creating this branch may cause unexpected behavior. Are you sure you want to create this branch?
build-push-action/dist/index.js
Go to fileThis commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/******/ (() => { // webpackBootstrap | |
/******/ var __webpack_modules__ = ({ | |
/***/ 9295: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
var __importDefault = (this && this.__importDefault) || function (mod) { | |
return (mod && mod.__esModule) ? mod : { "default": mod }; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.satisfies = exports.parseVersion = exports.getVersion = exports.isAvailable = exports.hasGitAuthToken = exports.isLocalOrTarExporter = exports.getSecret = exports.getSecretFile = exports.getSecretString = exports.getImageID = exports.getImageIDFile = void 0; | |
const sync_1 = __importDefault(__nccwpck_require__(8750)); | |
const fs_1 = __importDefault(__nccwpck_require__(5747)); | |
const path_1 = __importDefault(__nccwpck_require__(5622)); | |
const semver = __importStar(__nccwpck_require__(1383)); | |
const exec = __importStar(__nccwpck_require__(1514)); | |
const context = __importStar(__nccwpck_require__(3842)); | |
function getImageIDFile() { | |
return __awaiter(this, void 0, void 0, function* () { | |
return path_1.default.join(context.tmpDir(), 'iidfile').split(path_1.default.sep).join(path_1.default.posix.sep); | |
}); | |
} | |
exports.getImageIDFile = getImageIDFile; | |
function getImageID() { | |
return __awaiter(this, void 0, void 0, function* () { | |
const iidFile = yield getImageIDFile(); | |
if (!fs_1.default.existsSync(iidFile)) { | |
return undefined; | |
} | |
return fs_1.default.readFileSync(iidFile, { encoding: 'utf-8' }); | |
}); | |
} | |
exports.getImageID = getImageID; | |
function getSecretString(kvp) { | |
return __awaiter(this, void 0, void 0, function* () { | |
return getSecret(kvp, false); | |
}); | |
} | |
exports.getSecretString = getSecretString; | |
function getSecretFile(kvp) { | |
return __awaiter(this, void 0, void 0, function* () { | |
return getSecret(kvp, true); | |
}); | |
} | |
exports.getSecretFile = getSecretFile; | |
function getSecret(kvp, file) { | |
return __awaiter(this, void 0, void 0, function* () { | |
const delimiterIndex = kvp.indexOf('='); | |
const key = kvp.substring(0, delimiterIndex); | |
let value = kvp.substring(delimiterIndex + 1); | |
if (key.length == 0 || value.length == 0) { | |
throw new Error(`${kvp} is not a valid secret`); | |
} | |
if (file) { | |
if (!fs_1.default.existsSync(value)) { | |
throw new Error(`secret file ${value} not found`); | |
} | |
value = fs_1.default.readFileSync(value, { encoding: 'utf-8' }); | |
} | |
const secretFile = context.tmpNameSync({ | |
tmpdir: context.tmpDir() | |
}); | |
fs_1.default.writeFileSync(secretFile, value); | |
return `id=${key},src=${secretFile}`; | |
}); | |
} | |
exports.getSecret = getSecret; | |
function isLocalOrTarExporter(outputs) { | |
for (let output of sync_1.default(outputs.join(`\n`), { | |
delimiter: ',', | |
trim: true, | |
columns: false, | |
relaxColumnCount: true | |
})) { | |
// Local if no type is defined | |
// https://github.com/docker/buildx/blob/d2bf42f8b4784d83fde17acb3ed84703ddc2156b/build/output.go#L29-L43 | |
if (output.length == 1 && !output[0].startsWith('type=')) { | |
return true; | |
} | |
for (let [key, value] of output.map(chunk => chunk.split('=').map(item => item.trim()))) { | |
if (key == 'type' && (value == 'local' || value == 'tar')) { | |
return true; | |
} | |
} | |
} | |
return false; | |
} | |
exports.isLocalOrTarExporter = isLocalOrTarExporter; | |
function hasGitAuthToken(secrets) { | |
for (let secret of secrets) { | |
if (secret.startsWith('GIT_AUTH_TOKEN=')) { | |
return true; | |
} | |
} | |
return false; | |
} | |
exports.hasGitAuthToken = hasGitAuthToken; | |
function isAvailable() { | |
return __awaiter(this, void 0, void 0, function* () { | |
return yield exec | |
.getExecOutput('docker', ['buildx'], { | |
ignoreReturnCode: true, | |
silent: true | |
}) | |
.then(res => { | |
if (res.stderr.length > 0 && res.exitCode != 0) { | |
return false; | |
} | |
return res.exitCode == 0; | |
}); | |
}); | |
} | |
exports.isAvailable = isAvailable; | |
function getVersion() { | |
return __awaiter(this, void 0, void 0, function* () { | |
return yield exec | |
.getExecOutput('docker', ['buildx', 'version'], { | |
ignoreReturnCode: true, | |
silent: true | |
}) | |
.then(res => { | |
if (res.stderr.length > 0 && res.exitCode != 0) { | |
throw new Error(res.stderr.trim()); | |
} | |
return parseVersion(res.stdout); | |
}); | |
}); | |
} | |
exports.getVersion = getVersion; | |
function parseVersion(stdout) { | |
const matches = /\sv?([0-9a-f]{7}|[0-9.]+)/.exec(stdout); | |
if (!matches) { | |
throw new Error(`Cannot parse buildx version`); | |
} | |
return matches[1]; | |
} | |
exports.parseVersion = parseVersion; | |
function satisfies(version, range) { | |
return semver.satisfies(version, range) || /^[0-9a-f]{7}$/.exec(version) !== null; | |
} | |
exports.satisfies = satisfies; | |
//# sourceMappingURL=buildx.js.map | |
/***/ }), | |
/***/ 3842: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
var __importDefault = (this && this.__importDefault) || function (mod) { | |
return (mod && mod.__esModule) ? mod : { "default": mod }; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.setOutput = exports.asyncForEach = exports.getInputList = exports.getArgs = exports.getInputs = exports.tmpNameSync = exports.tmpDir = exports.defaultContext = void 0; | |
const sync_1 = __importDefault(__nccwpck_require__(8750)); | |
const fs = __importStar(__nccwpck_require__(5747)); | |
const os = __importStar(__nccwpck_require__(2087)); | |
const path = __importStar(__nccwpck_require__(5622)); | |
const tmp = __importStar(__nccwpck_require__(8517)); | |
const core = __importStar(__nccwpck_require__(2186)); | |
const command_1 = __nccwpck_require__(5241); | |
const github = __importStar(__nccwpck_require__(5438)); | |
const buildx = __importStar(__nccwpck_require__(9295)); | |
let _defaultContext, _tmpDir; | |
function defaultContext() { | |
if (!_defaultContext) { | |
let ref = github.context.ref; | |
if (github.context.sha && ref && !ref.startsWith('refs/')) { | |
ref = `refs/heads/${github.context.ref}`; | |
} | |
if (github.context.sha && !ref.startsWith(`refs/pull/`)) { | |
ref = github.context.sha; | |
} | |
_defaultContext = `${process.env.GITHUB_SERVER_URL || 'https://github.com'}/${github.context.repo.owner}/${github.context.repo.repo}.git#${ref}`; | |
} | |
return _defaultContext; | |
} | |
exports.defaultContext = defaultContext; | |
function tmpDir() { | |
if (!_tmpDir) { | |
_tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-build-push-')).split(path.sep).join(path.posix.sep); | |
} | |
return _tmpDir; | |
} | |
exports.tmpDir = tmpDir; | |
function tmpNameSync(options) { | |
return tmp.tmpNameSync(options); | |
} | |
exports.tmpNameSync = tmpNameSync; | |
function getInputs(defaultContext) { | |
return __awaiter(this, void 0, void 0, function* () { | |
return { | |
allow: yield getInputList('allow'), | |
buildArgs: yield getInputList('build-args', true), | |
builder: core.getInput('builder'), | |
cacheFrom: yield getInputList('cache-from', true), | |
cacheTo: yield getInputList('cache-to', true), | |
context: core.getInput('context') || defaultContext, | |
file: core.getInput('file'), | |
labels: yield getInputList('labels', true), | |
load: core.getBooleanInput('load'), | |
network: core.getInput('network'), | |
noCache: core.getBooleanInput('no-cache'), | |
outputs: yield getInputList('outputs', true), | |
platforms: yield getInputList('platforms'), | |
pull: core.getBooleanInput('pull'), | |
push: core.getBooleanInput('push'), | |
secrets: yield getInputList('secrets', true), | |
secretFiles: yield getInputList('secret-files', true), | |
ssh: yield getInputList('ssh'), | |
tags: yield getInputList('tags'), | |
target: core.getInput('target'), | |
githubToken: core.getInput('github-token') | |
}; | |
}); | |
} | |
exports.getInputs = getInputs; | |
function getArgs(inputs, defaultContext, buildxVersion) { | |
return __awaiter(this, void 0, void 0, function* () { | |
let args = ['buildx']; | |
args.push.apply(args, yield getBuildArgs(inputs, defaultContext, buildxVersion)); | |
args.push.apply(args, yield getCommonArgs(inputs)); | |
args.push(inputs.context); | |
return args; | |
}); | |
} | |
exports.getArgs = getArgs; | |
function getBuildArgs(inputs, defaultContext, buildxVersion) { | |
return __awaiter(this, void 0, void 0, function* () { | |
let args = ['build']; | |
yield exports.asyncForEach(inputs.buildArgs, (buildArg) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--build-arg', buildArg); | |
})); | |
yield exports.asyncForEach(inputs.labels, (label) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--label', label); | |
})); | |
yield exports.asyncForEach(inputs.tags, (tag) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--tag', tag); | |
})); | |
if (inputs.target) { | |
args.push('--target', inputs.target); | |
} | |
if (inputs.allow.length > 0) { | |
args.push('--allow', inputs.allow.join(',')); | |
} | |
if (inputs.platforms.length > 0) { | |
args.push('--platform', inputs.platforms.join(',')); | |
} | |
yield exports.asyncForEach(inputs.outputs, (output) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--output', output); | |
})); | |
if (!buildx.isLocalOrTarExporter(inputs.outputs) && (inputs.platforms.length == 0 || buildx.satisfies(buildxVersion, '>=0.4.2'))) { | |
args.push('--iidfile', yield buildx.getImageIDFile()); | |
} | |
yield exports.asyncForEach(inputs.cacheFrom, (cacheFrom) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--cache-from', cacheFrom); | |
})); | |
yield exports.asyncForEach(inputs.cacheTo, (cacheTo) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--cache-to', cacheTo); | |
})); | |
yield exports.asyncForEach(inputs.secrets, (secret) => __awaiter(this, void 0, void 0, function* () { | |
try { | |
args.push('--secret', yield buildx.getSecretString(secret)); | |
} | |
catch (err) { | |
core.warning(err.message); | |
} | |
})); | |
yield exports.asyncForEach(inputs.secretFiles, (secretFile) => __awaiter(this, void 0, void 0, function* () { | |
try { | |
args.push('--secret', yield buildx.getSecretFile(secretFile)); | |
} | |
catch (err) { | |
core.warning(err.message); | |
} | |
})); | |
if (inputs.githubToken && !buildx.hasGitAuthToken(inputs.secrets) && inputs.context == defaultContext) { | |
args.push('--secret', yield buildx.getSecretString(`GIT_AUTH_TOKEN=${inputs.githubToken}`)); | |
} | |
yield exports.asyncForEach(inputs.ssh, (ssh) => __awaiter(this, void 0, void 0, function* () { | |
args.push('--ssh', ssh); | |
})); | |
if (inputs.file) { | |
args.push('--file', inputs.file); | |
} | |
return args; | |
}); | |
} | |
function getCommonArgs(inputs) { | |
return __awaiter(this, void 0, void 0, function* () { | |
let args = []; | |
if (inputs.noCache) { | |
args.push('--no-cache'); | |
} | |
if (inputs.builder) { | |
args.push('--builder', inputs.builder); | |
} | |
if (inputs.pull) { | |
args.push('--pull'); | |
} | |
if (inputs.load) { | |
args.push('--load'); | |
} | |
if (inputs.network) { | |
args.push('--network', inputs.network); | |
} | |
if (inputs.push) { | |
args.push('--push'); | |
} | |
return args; | |
}); | |
} | |
function getInputList(name, ignoreComma) { | |
return __awaiter(this, void 0, void 0, function* () { | |
let res = []; | |
const items = core.getInput(name); | |
if (items == '') { | |
return res; | |
} | |
for (let output of (yield sync_1.default(items, { | |
columns: false, | |
relax: true, | |
relaxColumnCount: true, | |
skipLinesWithEmptyValues: true | |
}))) { | |
if (output.length == 1) { | |
res.push(output[0]); | |
continue; | |
} | |
else if (!ignoreComma) { | |
res.push(...output); | |
continue; | |
} | |
res.push(output.join(',')); | |
} | |
return res.filter(item => item).map(pat => pat.trim()); | |
}); | |
} | |
exports.getInputList = getInputList; | |
const asyncForEach = (array, callback) => __awaiter(void 0, void 0, void 0, function* () { | |
for (let index = 0; index < array.length; index++) { | |
yield callback(array[index], index, array); | |
} | |
}); | |
exports.asyncForEach = asyncForEach; | |
// FIXME: Temp fix https://github.com/actions/toolkit/issues/777 | |
function setOutput(name, value) { | |
command_1.issueCommand('set-output', { name }, value); | |
} | |
exports.setOutput = setOutput; | |
//# sourceMappingURL=context.js.map | |
/***/ }), | |
/***/ 3109: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
const fs = __importStar(__nccwpck_require__(5747)); | |
const buildx = __importStar(__nccwpck_require__(9295)); | |
const context = __importStar(__nccwpck_require__(3842)); | |
const stateHelper = __importStar(__nccwpck_require__(8647)); | |
const core = __importStar(__nccwpck_require__(2186)); | |
const exec = __importStar(__nccwpck_require__(1514)); | |
function run() { | |
return __awaiter(this, void 0, void 0, function* () { | |
try { | |
core.startGroup(`Docker info`); | |
yield exec.exec('docker', ['version']); | |
yield exec.exec('docker', ['info']); | |
core.endGroup(); | |
if (!(yield buildx.isAvailable())) { | |
core.setFailed(`Docker buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx.`); | |
return; | |
} | |
stateHelper.setTmpDir(context.tmpDir()); | |
const buildxVersion = yield buildx.getVersion(); | |
const defContext = context.defaultContext(); | |
let inputs = yield context.getInputs(defContext); | |
const args = yield context.getArgs(inputs, defContext, buildxVersion); | |
yield exec | |
.getExecOutput('docker', args, { | |
ignoreReturnCode: true | |
}) | |
.then(res => { | |
if (res.stderr.length > 0 && res.exitCode != 0) { | |
throw new Error(`buildx bake failed with: ${res.stderr.match(/(.*)\s*$/)[0].trim()}`); | |
} | |
}); | |
const imageID = yield buildx.getImageID(); | |
if (imageID) { | |
core.startGroup(`Extracting digest`); | |
core.info(`${imageID}`); | |
context.setOutput('digest', imageID); | |
core.endGroup(); | |
} | |
} | |
catch (error) { | |
core.setFailed(error.message); | |
} | |
}); | |
} | |
function cleanup() { | |
return __awaiter(this, void 0, void 0, function* () { | |
if (stateHelper.tmpDir.length > 0) { | |
core.startGroup(`Removing temp folder ${stateHelper.tmpDir}`); | |
fs.rmdirSync(stateHelper.tmpDir, { recursive: true }); | |
core.endGroup(); | |
} | |
}); | |
} | |
if (!stateHelper.IsPost) { | |
run(); | |
} | |
else { | |
cleanup(); | |
} | |
//# sourceMappingURL=main.js.map | |
/***/ }), | |
/***/ 8647: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.setTmpDir = exports.tmpDir = exports.IsPost = void 0; | |
const core = __importStar(__nccwpck_require__(2186)); | |
exports.IsPost = !!process.env['STATE_isPost']; | |
exports.tmpDir = process.env['STATE_tmpDir'] || ''; | |
function setTmpDir(tmpDir) { | |
core.saveState('tmpDir', tmpDir); | |
} | |
exports.setTmpDir = setTmpDir; | |
if (!exports.IsPost) { | |
core.saveState('isPost', 'true'); | |
} | |
//# sourceMappingURL=state-helper.js.map | |
/***/ }), | |
/***/ 5241: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.issue = exports.issueCommand = void 0; | |
const os = __importStar(__nccwpck_require__(2087)); | |
const utils_1 = __nccwpck_require__(5278); | |
/** | |
* Commands | |
* | |
* Command Format: | |
* ::name key=value,key=value::message | |
* | |
* Examples: | |
* ::warning::This is the message | |
* ::set-env name=MY_VAR::some value | |
*/ | |
function issueCommand(command, properties, message) { | |
const cmd = new Command(command, properties, message); | |
process.stdout.write(cmd.toString() + os.EOL); | |
} | |
exports.issueCommand = issueCommand; | |
function issue(name, message = '') { | |
issueCommand(name, {}, message); | |
} | |
exports.issue = issue; | |
const CMD_STRING = '::'; | |
class Command { | |
constructor(command, properties, message) { | |
if (!command) { | |
command = 'missing.command'; | |
} | |
this.command = command; | |
this.properties = properties; | |
this.message = message; | |
} | |
toString() { | |
let cmdStr = CMD_STRING + this.command; | |
if (this.properties && Object.keys(this.properties).length > 0) { | |
cmdStr += ' '; | |
let first = true; | |
for (const key in this.properties) { | |
if (this.properties.hasOwnProperty(key)) { | |
const val = this.properties[key]; | |
if (val) { | |
if (first) { | |
first = false; | |
} | |
else { | |
cmdStr += ','; | |
} | |
cmdStr += `${key}=${escapeProperty(val)}`; | |
} | |
} | |
} | |
} | |
cmdStr += `${CMD_STRING}${escapeData(this.message)}`; | |
return cmdStr; | |
} | |
} | |
function escapeData(s) { | |
return utils_1.toCommandValue(s) | |
.replace(/%/g, '%25') | |
.replace(/\r/g, '%0D') | |
.replace(/\n/g, '%0A'); | |
} | |
function escapeProperty(s) { | |
return utils_1.toCommandValue(s) | |
.replace(/%/g, '%25') | |
.replace(/\r/g, '%0D') | |
.replace(/\n/g, '%0A') | |
.replace(/:/g, '%3A') | |
.replace(/,/g, '%2C'); | |
} | |
//# sourceMappingURL=command.js.map | |
/***/ }), | |
/***/ 2186: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; | |
const command_1 = __nccwpck_require__(5241); | |
const file_command_1 = __nccwpck_require__(717); | |
const utils_1 = __nccwpck_require__(5278); | |
const os = __importStar(__nccwpck_require__(2087)); | |
const path = __importStar(__nccwpck_require__(5622)); | |
/** | |
* The code to exit an action | |
*/ | |
var ExitCode; | |
(function (ExitCode) { | |
/** | |
* A code indicating that the action was successful | |
*/ | |
ExitCode[ExitCode["Success"] = 0] = "Success"; | |
/** | |
* A code indicating that the action was a failure | |
*/ | |
ExitCode[ExitCode["Failure"] = 1] = "Failure"; | |
})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); | |
//----------------------------------------------------------------------- | |
// Variables | |
//----------------------------------------------------------------------- | |
/** | |
* Sets env variable for this action and future actions in the job | |
* @param name the name of the variable to set | |
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify | |
*/ | |
// eslint-disable-next-line @typescript-eslint/no-explicit-any | |
function exportVariable(name, val) { | |
const convertedVal = utils_1.toCommandValue(val); | |
process.env[name] = convertedVal; | |
const filePath = process.env['GITHUB_ENV'] || ''; | |
if (filePath) { | |
const delimiter = '_GitHubActionsFileCommandDelimeter_'; | |
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; | |
file_command_1.issueCommand('ENV', commandValue); | |
} | |
else { | |
command_1.issueCommand('set-env', { name }, convertedVal); | |
} | |
} | |
exports.exportVariable = exportVariable; | |
/** | |
* Registers a secret which will get masked from logs | |
* @param secret value of the secret | |
*/ | |
function setSecret(secret) { | |
command_1.issueCommand('add-mask', {}, secret); | |
} | |
exports.setSecret = setSecret; | |
/** | |
* Prepends inputPath to the PATH (for this action and future actions) | |
* @param inputPath | |
*/ | |
function addPath(inputPath) { | |
const filePath = process.env['GITHUB_PATH'] || ''; | |
if (filePath) { | |
file_command_1.issueCommand('PATH', inputPath); | |
} | |
else { | |
command_1.issueCommand('add-path', {}, inputPath); | |
} | |
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; | |
} | |
exports.addPath = addPath; | |
/** | |
* Gets the value of an input. | |
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. | |
* Returns an empty string if the value is not defined. | |
* | |
* @param name name of the input to get | |
* @param options optional. See InputOptions. | |
* @returns string | |
*/ | |
function getInput(name, options) { | |
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; | |
if (options && options.required && !val) { | |
throw new Error(`Input required and not supplied: ${name}`); | |
} | |
if (options && options.trimWhitespace === false) { | |
return val; | |
} | |
return val.trim(); | |
} | |
exports.getInput = getInput; | |
/** | |
* Gets the values of an multiline input. Each value is also trimmed. | |
* | |
* @param name name of the input to get | |
* @param options optional. See InputOptions. | |
* @returns string[] | |
* | |
*/ | |
function getMultilineInput(name, options) { | |
const inputs = getInput(name, options) | |
.split('\n') | |
.filter(x => x !== ''); | |
return inputs; | |
} | |
exports.getMultilineInput = getMultilineInput; | |
/** | |
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. | |
* Support boolean input list: `true | True | TRUE | false | False | FALSE` . | |
* The return value is also in boolean type. | |
* ref: https://yaml.org/spec/1.2/spec.html#id2804923 | |
* | |
* @param name name of the input to get | |
* @param options optional. See InputOptions. | |
* @returns boolean | |
*/ | |
function getBooleanInput(name, options) { | |
const trueValue = ['true', 'True', 'TRUE']; | |
const falseValue = ['false', 'False', 'FALSE']; | |
const val = getInput(name, options); | |
if (trueValue.includes(val)) | |
return true; | |
if (falseValue.includes(val)) | |
return false; | |
throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + | |
`Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); | |
} | |
exports.getBooleanInput = getBooleanInput; | |
/** | |
* Sets the value of an output. | |
* | |
* @param name name of the output to set | |
* @param value value to store. Non-string values will be converted to a string via JSON.stringify | |
*/ | |
// eslint-disable-next-line @typescript-eslint/no-explicit-any | |
function setOutput(name, value) { | |
process.stdout.write(os.EOL); | |
command_1.issueCommand('set-output', { name }, value); | |
} | |
exports.setOutput = setOutput; | |
/** | |
* Enables or disables the echoing of commands into stdout for the rest of the step. | |
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. | |
* | |
*/ | |
function setCommandEcho(enabled) { | |
command_1.issue('echo', enabled ? 'on' : 'off'); | |
} | |
exports.setCommandEcho = setCommandEcho; | |
//----------------------------------------------------------------------- | |
// Results | |
//----------------------------------------------------------------------- | |
/** | |
* Sets the action status to failed. | |
* When the action exits it will be with an exit code of 1 | |
* @param message add error issue message | |
*/ | |
function setFailed(message) { | |
process.exitCode = ExitCode.Failure; | |
error(message); | |
} | |
exports.setFailed = setFailed; | |
//----------------------------------------------------------------------- | |
// Logging Commands | |
//----------------------------------------------------------------------- | |
/** | |
* Gets whether Actions Step Debug is on or not | |
*/ | |
function isDebug() { | |
return process.env['RUNNER_DEBUG'] === '1'; | |
} | |
exports.isDebug = isDebug; | |
/** | |
* Writes debug message to user log | |
* @param message debug message | |
*/ | |
function debug(message) { | |
command_1.issueCommand('debug', {}, message); | |
} | |
exports.debug = debug; | |
/** | |
* Adds an error issue | |
* @param message error issue message. Errors will be converted to string via toString() | |
*/ | |
function error(message) { | |
command_1.issue('error', message instanceof Error ? message.toString() : message); | |
} | |
exports.error = error; | |
/** | |
* Adds an warning issue | |
* @param message warning issue message. Errors will be converted to string via toString() | |
*/ | |
function warning(message) { | |
command_1.issue('warning', message instanceof Error ? message.toString() : message); | |
} | |
exports.warning = warning; | |
/** | |
* Writes info to log with console.log. | |
* @param message info message | |
*/ | |
function info(message) { | |
process.stdout.write(message + os.EOL); | |
} | |
exports.info = info; | |
/** | |
* Begin an output group. | |
* | |
* Output until the next `groupEnd` will be foldable in this group | |
* | |
* @param name The name of the output group | |
*/ | |
function startGroup(name) { | |
command_1.issue('group', name); | |
} | |
exports.startGroup = startGroup; | |
/** | |
* End an output group. | |
*/ | |
function endGroup() { | |
command_1.issue('endgroup'); | |
} | |
exports.endGroup = endGroup; | |
/** | |
* Wrap an asynchronous function call in a group. | |
* | |
* Returns the same type as the function itself. | |
* | |
* @param name The name of the group | |
* @param fn The function to wrap in the group | |
*/ | |
function group(name, fn) { | |
return __awaiter(this, void 0, void 0, function* () { | |
startGroup(name); | |
let result; | |
try { | |
result = yield fn(); | |
} | |
finally { | |
endGroup(); | |
} | |
return result; | |
}); | |
} | |
exports.group = group; | |
//----------------------------------------------------------------------- | |
// Wrapper action state | |
//----------------------------------------------------------------------- | |
/** | |
* Saves state for current action, the state can only be retrieved by this action's post job execution. | |
* | |
* @param name name of the state to store | |
* @param value value to store. Non-string values will be converted to a string via JSON.stringify | |
*/ | |
// eslint-disable-next-line @typescript-eslint/no-explicit-any | |
function saveState(name, value) { | |
command_1.issueCommand('save-state', { name }, value); | |
} | |
exports.saveState = saveState; | |
/** | |
* Gets the value of an state set by this action's main execution. | |
* | |
* @param name name of the state to get | |
* @returns string | |
*/ | |
function getState(name) { | |
return process.env[`STATE_${name}`] || ''; | |
} | |
exports.getState = getState; | |
//# sourceMappingURL=core.js.map | |
/***/ }), | |
/***/ 717: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
// For internal use, subject to change. | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.issueCommand = void 0; | |
// We use any as a valid input type | |
/* eslint-disable @typescript-eslint/no-explicit-any */ | |
const fs = __importStar(__nccwpck_require__(5747)); | |
const os = __importStar(__nccwpck_require__(2087)); | |
const utils_1 = __nccwpck_require__(5278); | |
function issueCommand(command, message) { | |
const filePath = process.env[`GITHUB_${command}`]; | |
if (!filePath) { | |
throw new Error(`Unable to find environment variable for file command ${command}`); | |
} | |
if (!fs.existsSync(filePath)) { | |
throw new Error(`Missing file at path: ${filePath}`); | |
} | |
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { | |
encoding: 'utf8' | |
}); | |
} | |
exports.issueCommand = issueCommand; | |
//# sourceMappingURL=file-command.js.map | |
/***/ }), | |
/***/ 5278: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
// We use any as a valid input type | |
/* eslint-disable @typescript-eslint/no-explicit-any */ | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.toCommandValue = void 0; | |
/** | |
* Sanitizes an input into a string so it can be passed into issueCommand safely | |
* @param input input to sanitize into a string | |
*/ | |
function toCommandValue(input) { | |
if (input === null || input === undefined) { | |
return ''; | |
} | |
else if (typeof input === 'string' || input instanceof String) { | |
return input; | |
} | |
return JSON.stringify(input); | |
} | |
exports.toCommandValue = toCommandValue; | |
//# sourceMappingURL=utils.js.map | |
/***/ }), | |
/***/ 1514: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.getExecOutput = exports.exec = void 0; | |
const string_decoder_1 = __nccwpck_require__(4304); | |
const tr = __importStar(__nccwpck_require__(8159)); | |
/** | |
* Exec a command. | |
* Output will be streamed to the live console. | |
* Returns promise with return code | |
* | |
* @param commandLine command to execute (can include additional args). Must be correctly escaped. | |
* @param args optional arguments for tool. Escaping is handled by the lib. | |
* @param options optional exec options. See ExecOptions | |
* @returns Promise<number> exit code | |
*/ | |
function exec(commandLine, args, options) { | |
return __awaiter(this, void 0, void 0, function* () { | |
const commandArgs = tr.argStringToArray(commandLine); | |
if (commandArgs.length === 0) { | |
throw new Error(`Parameter 'commandLine' cannot be null or empty.`); | |
} | |
// Path to tool to execute should be first arg | |
const toolPath = commandArgs[0]; | |
args = commandArgs.slice(1).concat(args || []); | |
const runner = new tr.ToolRunner(toolPath, args, options); | |
return runner.exec(); | |
}); | |
} | |
exports.exec = exec; | |
/** | |
* Exec a command and get the output. | |
* Output will be streamed to the live console. | |
* Returns promise with the exit code and collected stdout and stderr | |
* | |
* @param commandLine command to execute (can include additional args). Must be correctly escaped. | |
* @param args optional arguments for tool. Escaping is handled by the lib. | |
* @param options optional exec options. See ExecOptions | |
* @returns Promise<ExecOutput> exit code, stdout, and stderr | |
*/ | |
function getExecOutput(commandLine, args, options) { | |
var _a, _b; | |
return __awaiter(this, void 0, void 0, function* () { | |
let stdout = ''; | |
let stderr = ''; | |
//Using string decoder covers the case where a mult-byte character is split | |
const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); | |
const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); | |
const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; | |
const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; | |
const stdErrListener = (data) => { | |
stderr += stderrDecoder.write(data); | |
if (originalStdErrListener) { | |
originalStdErrListener(data); | |
} | |
}; | |
const stdOutListener = (data) => { | |
stdout += stdoutDecoder.write(data); | |
if (originalStdoutListener) { | |
originalStdoutListener(data); | |
} | |
}; | |
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); | |
const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); | |
//flush any remaining characters | |
stdout += stdoutDecoder.end(); | |
stderr += stderrDecoder.end(); | |
return { | |
exitCode, | |
stdout, | |
stderr | |
}; | |
}); | |
} | |
exports.getExecOutput = getExecOutput; | |
//# sourceMappingURL=exec.js.map | |
/***/ }), | |
/***/ 8159: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.argStringToArray = exports.ToolRunner = void 0; | |
const os = __importStar(__nccwpck_require__(2087)); | |
const events = __importStar(__nccwpck_require__(8614)); | |
const child = __importStar(__nccwpck_require__(3129)); | |
const path = __importStar(__nccwpck_require__(5622)); | |
const io = __importStar(__nccwpck_require__(7351)); | |
const ioUtil = __importStar(__nccwpck_require__(1962)); | |
const timers_1 = __nccwpck_require__(8213); | |
/* eslint-disable @typescript-eslint/unbound-method */ | |
const IS_WINDOWS = process.platform === 'win32'; | |
/* | |
* Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. | |
*/ | |
class ToolRunner extends events.EventEmitter { | |
constructor(toolPath, args, options) { | |
super(); | |
if (!toolPath) { | |
throw new Error("Parameter 'toolPath' cannot be null or empty."); | |
} | |
this.toolPath = toolPath; | |
this.args = args || []; | |
this.options = options || {}; | |
} | |
_debug(message) { | |
if (this.options.listeners && this.options.listeners.debug) { | |
this.options.listeners.debug(message); | |
} | |
} | |
_getCommandString(options, noPrefix) { | |
const toolPath = this._getSpawnFileName(); | |
const args = this._getSpawnArgs(options); | |
let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool | |
if (IS_WINDOWS) { | |
// Windows + cmd file | |
if (this._isCmdFile()) { | |
cmd += toolPath; | |
for (const a of args) { | |
cmd += ` ${a}`; | |
} | |
} | |
// Windows + verbatim | |
else if (options.windowsVerbatimArguments) { | |
cmd += `"${toolPath}"`; | |
for (const a of args) { | |
cmd += ` ${a}`; | |
} | |
} | |
// Windows (regular) | |
else { | |
cmd += this._windowsQuoteCmdArg(toolPath); | |
for (const a of args) { | |
cmd += ` ${this._windowsQuoteCmdArg(a)}`; | |
} | |
} | |
} | |
else { | |
// OSX/Linux - this can likely be improved with some form of quoting. | |
// creating processes on Unix is fundamentally different than Windows. | |
// on Unix, execvp() takes an arg array. | |
cmd += toolPath; | |
for (const a of args) { | |
cmd += ` ${a}`; | |
} | |
} | |
return cmd; | |
} | |
_processLineBuffer(data, strBuffer, onLine) { | |
try { | |
let s = strBuffer + data.toString(); | |
let n = s.indexOf(os.EOL); | |
while (n > -1) { | |
const line = s.substring(0, n); | |
onLine(line); | |
// the rest of the string ... | |
s = s.substring(n + os.EOL.length); | |
n = s.indexOf(os.EOL); | |
} | |
return s; | |
} | |
catch (err) { | |
// streaming lines to console is best effort. Don't fail a build. | |
this._debug(`error processing line. Failed with error ${err}`); | |
return ''; | |
} | |
} | |
_getSpawnFileName() { | |
if (IS_WINDOWS) { | |
if (this._isCmdFile()) { | |
return process.env['COMSPEC'] || 'cmd.exe'; | |
} | |
} | |
return this.toolPath; | |
} | |
_getSpawnArgs(options) { | |
if (IS_WINDOWS) { | |
if (this._isCmdFile()) { | |
let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; | |
for (const a of this.args) { | |
argline += ' '; | |
argline += options.windowsVerbatimArguments | |
? a | |
: this._windowsQuoteCmdArg(a); | |
} | |
argline += '"'; | |
return [argline]; | |
} | |
} | |
return this.args; | |
} | |
_endsWith(str, end) { | |
return str.endsWith(end); | |
} | |
_isCmdFile() { | |
const upperToolPath = this.toolPath.toUpperCase(); | |
return (this._endsWith(upperToolPath, '.CMD') || | |
this._endsWith(upperToolPath, '.BAT')); | |
} | |
_windowsQuoteCmdArg(arg) { | |
// for .exe, apply the normal quoting rules that libuv applies | |
if (!this._isCmdFile()) { | |
return this._uvQuoteCmdArg(arg); | |
} | |
// otherwise apply quoting rules specific to the cmd.exe command line parser. | |
// the libuv rules are generic and are not designed specifically for cmd.exe | |
// command line parser. | |
// | |
// for a detailed description of the cmd.exe command line parser, refer to | |
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 | |
// need quotes for empty arg | |
if (!arg) { | |
return '""'; | |
} | |
// determine whether the arg needs to be quoted | |
const cmdSpecialChars = [ | |
' ', | |
'\t', | |
'&', | |
'(', | |
')', | |
'[', | |
']', | |
'{', | |
'}', | |
'^', | |
'=', | |
';', | |
'!', | |
"'", | |
'+', | |
',', | |
'`', | |
'~', | |
'|', | |
'<', | |
'>', | |
'"' | |
]; | |
let needsQuotes = false; | |
for (const char of arg) { | |
if (cmdSpecialChars.some(x => x === char)) { | |
needsQuotes = true; | |
break; | |
} | |
} | |
// short-circuit if quotes not needed | |
if (!needsQuotes) { | |
return arg; | |
} | |
// the following quoting rules are very similar to the rules that by libuv applies. | |
// | |
// 1) wrap the string in quotes | |
// | |
// 2) double-up quotes - i.e. " => "" | |
// | |
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately | |
// doesn't work well with a cmd.exe command line. | |
// | |
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. | |
// for example, the command line: | |
// foo.exe "myarg:""my val""" | |
// is parsed by a .NET console app into an arg array: | |
// [ "myarg:\"my val\"" ] | |
// which is the same end result when applying libuv quoting rules. although the actual | |
// command line from libuv quoting rules would look like: | |
// foo.exe "myarg:\"my val\"" | |
// | |
// 3) double-up slashes that precede a quote, | |
// e.g. hello \world => "hello \world" | |
// hello\"world => "hello\\""world" | |
// hello\\"world => "hello\\\\""world" | |
// hello world\ => "hello world\\" | |
// | |
// technically this is not required for a cmd.exe command line, or the batch argument parser. | |
// the reasons for including this as a .cmd quoting rule are: | |
// | |
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an | |
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. | |
// | |
// b) it's what we've been doing previously (by deferring to node default behavior) and we | |
// haven't heard any complaints about that aspect. | |
// | |
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be | |
// escaped when used on the command line directly - even though within a .cmd file % can be escaped | |
// by using %%. | |
// | |
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts | |
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. | |
// | |
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would | |
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the | |
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args | |
// to an external program. | |
// | |
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. | |
// % can be escaped within a .cmd file. | |
let reverse = '"'; | |
let quoteHit = true; | |
for (let i = arg.length; i > 0; i--) { | |
// walk the string in reverse | |
reverse += arg[i - 1]; | |
if (quoteHit && arg[i - 1] === '\\') { | |
reverse += '\\'; // double the slash | |
} | |
else if (arg[i - 1] === '"') { | |
quoteHit = true; | |
reverse += '"'; // double the quote | |
} | |
else { | |
quoteHit = false; | |
} | |
} | |
reverse += '"'; | |
return reverse | |
.split('') | |
.reverse() | |
.join(''); | |
} | |
_uvQuoteCmdArg(arg) { | |
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as | |
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments | |
// is used. | |
// | |
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, | |
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), | |
// pasting copyright notice from Node within this function: | |
// | |
// Copyright Joyent, Inc. and other Node contributors. All rights reserved. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a copy | |
// of this software and associated documentation files (the "Software"), to | |
// deal in the Software without restriction, including without limitation the | |
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or | |
// sell copies of the Software, and to permit persons to whom the Software is | |
// furnished to do so, subject to the following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included in | |
// all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS | |
// IN THE SOFTWARE. | |
if (!arg) { | |
// Need double quotation for empty argument | |
return '""'; | |
} | |
if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { | |
// No quotation needed | |
return arg; | |
} | |
if (!arg.includes('"') && !arg.includes('\\')) { | |
// No embedded double quotes or backslashes, so I can just wrap | |
// quote marks around the whole thing. | |
return `"${arg}"`; | |
} | |
// Expected input/output: | |
// input : hello"world | |
// output: "hello\"world" | |
// input : hello""world | |
// output: "hello\"\"world" | |
// input : hello\world | |
// output: hello\world | |
// input : hello\\world | |
// output: hello\\world | |
// input : hello\"world | |
// output: "hello\\\"world" | |
// input : hello\\"world | |
// output: "hello\\\\\"world" | |
// input : hello world\ | |
// output: "hello world\\" - note the comment in libuv actually reads "hello world\" | |
// but it appears the comment is wrong, it should be "hello world\\" | |
let reverse = '"'; | |
let quoteHit = true; | |
for (let i = arg.length; i > 0; i--) { | |
// walk the string in reverse | |
reverse += arg[i - 1]; | |
if (quoteHit && arg[i - 1] === '\\') { | |
reverse += '\\'; | |
} | |
else if (arg[i - 1] === '"') { | |
quoteHit = true; | |
reverse += '\\'; | |
} | |
else { | |
quoteHit = false; | |
} | |
} | |
reverse += '"'; | |
return reverse | |
.split('') | |
.reverse() | |
.join(''); | |
} | |
_cloneExecOptions(options) { | |
options = options || {}; | |
const result = { | |
cwd: options.cwd || process.cwd(), | |
env: options.env || process.env, | |
silent: options.silent || false, | |
windowsVerbatimArguments: options.windowsVerbatimArguments || false, | |
failOnStdErr: options.failOnStdErr || false, | |
ignoreReturnCode: options.ignoreReturnCode || false, | |
delay: options.delay || 10000 | |
}; | |
result.outStream = options.outStream || process.stdout; | |
result.errStream = options.errStream || process.stderr; | |
return result; | |
} | |
_getSpawnOptions(options, toolPath) { | |
options = options || {}; | |
const result = {}; | |
result.cwd = options.cwd; | |
result.env = options.env; | |
result['windowsVerbatimArguments'] = | |
options.windowsVerbatimArguments || this._isCmdFile(); | |
if (options.windowsVerbatimArguments) { | |
result.argv0 = `"${toolPath}"`; | |
} | |
return result; | |
} | |
/** | |
* Exec a tool. | |
* Output will be streamed to the live console. | |
* Returns promise with return code | |
* | |
* @param tool path to tool to exec | |
* @param options optional exec options. See ExecOptions | |
* @returns number | |
*/ | |
exec() { | |
return __awaiter(this, void 0, void 0, function* () { | |
// root the tool path if it is unrooted and contains relative pathing | |
if (!ioUtil.isRooted(this.toolPath) && | |
(this.toolPath.includes('/') || | |
(IS_WINDOWS && this.toolPath.includes('\\')))) { | |
// prefer options.cwd if it is specified, however options.cwd may also need to be rooted | |
this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); | |
} | |
// if the tool is only a file name, then resolve it from the PATH | |
// otherwise verify it exists (add extension on Windows if necessary) | |
this.toolPath = yield io.which(this.toolPath, true); | |
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { | |
this._debug(`exec tool: ${this.toolPath}`); | |
this._debug('arguments:'); | |
for (const arg of this.args) { | |
this._debug(` ${arg}`); | |
} | |
const optionsNonNull = this._cloneExecOptions(this.options); | |
if (!optionsNonNull.silent && optionsNonNull.outStream) { | |
optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); | |
} | |
const state = new ExecState(optionsNonNull, this.toolPath); | |
state.on('debug', (message) => { | |
this._debug(message); | |
}); | |
if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { | |
return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); | |
} | |
const fileName = this._getSpawnFileName(); | |
const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); | |
let stdbuffer = ''; | |
if (cp.stdout) { | |
cp.stdout.on('data', (data) => { | |
if (this.options.listeners && this.options.listeners.stdout) { | |
this.options.listeners.stdout(data); | |
} | |
if (!optionsNonNull.silent && optionsNonNull.outStream) { | |
optionsNonNull.outStream.write(data); | |
} | |
stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { | |
if (this.options.listeners && this.options.listeners.stdline) { | |
this.options.listeners.stdline(line); | |
} | |
}); | |
}); | |
} | |
let errbuffer = ''; | |
if (cp.stderr) { | |
cp.stderr.on('data', (data) => { | |
state.processStderr = true; | |
if (this.options.listeners && this.options.listeners.stderr) { | |
this.options.listeners.stderr(data); | |
} | |
if (!optionsNonNull.silent && | |
optionsNonNull.errStream && | |
optionsNonNull.outStream) { | |
const s = optionsNonNull.failOnStdErr | |
? optionsNonNull.errStream | |
: optionsNonNull.outStream; | |
s.write(data); | |
} | |
errbuffer = this._processLineBuffer(data, errbuffer, (line) => { | |
if (this.options.listeners && this.options.listeners.errline) { | |
this.options.listeners.errline(line); | |
} | |
}); | |
}); | |
} | |
cp.on('error', (err) => { | |
state.processError = err.message; | |
state.processExited = true; | |
state.processClosed = true; | |
state.CheckComplete(); | |
}); | |
cp.on('exit', (code) => { | |
state.processExitCode = code; | |
state.processExited = true; | |
this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); | |
state.CheckComplete(); | |
}); | |
cp.on('close', (code) => { | |
state.processExitCode = code; | |
state.processExited = true; | |
state.processClosed = true; | |
this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); | |
state.CheckComplete(); | |
}); | |
state.on('done', (error, exitCode) => { | |
if (stdbuffer.length > 0) { | |
this.emit('stdline', stdbuffer); | |
} | |
if (errbuffer.length > 0) { | |
this.emit('errline', errbuffer); | |
} | |
cp.removeAllListeners(); | |
if (error) { | |
reject(error); | |
} | |
else { | |
resolve(exitCode); | |
} | |
}); | |
if (this.options.input) { | |
if (!cp.stdin) { | |
throw new Error('child process missing stdin'); | |
} | |
cp.stdin.end(this.options.input); | |
} | |
})); | |
}); | |
} | |
} | |
exports.ToolRunner = ToolRunner; | |
/** | |
* Convert an arg string to an array of args. Handles escaping | |
* | |
* @param argString string of arguments | |
* @returns string[] array of arguments | |
*/ | |
function argStringToArray(argString) { | |
const args = []; | |
let inQuotes = false; | |
let escaped = false; | |
let arg = ''; | |
function append(c) { | |
// we only escape double quotes. | |
if (escaped && c !== '"') { | |
arg += '\\'; | |
} | |
arg += c; | |
escaped = false; | |
} | |
for (let i = 0; i < argString.length; i++) { | |
const c = argString.charAt(i); | |
if (c === '"') { | |
if (!escaped) { | |
inQuotes = !inQuotes; | |
} | |
else { | |
append(c); | |
} | |
continue; | |
} | |
if (c === '\\' && escaped) { | |
append(c); | |
continue; | |
} | |
if (c === '\\' && inQuotes) { | |
escaped = true; | |
continue; | |
} | |
if (c === ' ' && !inQuotes) { | |
if (arg.length > 0) { | |
args.push(arg); | |
arg = ''; | |
} | |
continue; | |
} | |
append(c); | |
} | |
if (arg.length > 0) { | |
args.push(arg.trim()); | |
} | |
return args; | |
} | |
exports.argStringToArray = argStringToArray; | |
class ExecState extends events.EventEmitter { | |
constructor(options, toolPath) { | |
super(); | |
this.processClosed = false; // tracks whether the process has exited and stdio is closed | |
this.processError = ''; | |
this.processExitCode = 0; | |
this.processExited = false; // tracks whether the process has exited | |
this.processStderr = false; // tracks whether stderr was written to | |
this.delay = 10000; // 10 seconds | |
this.done = false; | |
this.timeout = null; | |
if (!toolPath) { | |
throw new Error('toolPath must not be empty'); | |
} | |
this.options = options; | |
this.toolPath = toolPath; | |
if (options.delay) { | |
this.delay = options.delay; | |
} | |
} | |
CheckComplete() { | |
if (this.done) { | |
return; | |
} | |
if (this.processClosed) { | |
this._setResult(); | |
} | |
else if (this.processExited) { | |
this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); | |
} | |
} | |
_debug(message) { | |
this.emit('debug', message); | |
} | |
_setResult() { | |
// determine whether there is an error | |
let error; | |
if (this.processExited) { | |
if (this.processError) { | |
error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); | |
} | |
else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { | |
error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); | |
} | |
else if (this.processStderr && this.options.failOnStdErr) { | |
error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); | |
} | |
} | |
// clear the timeout | |
if (this.timeout) { | |
clearTimeout(this.timeout); | |
this.timeout = null; | |
} | |
this.done = true; | |
this.emit('done', error, this.processExitCode); | |
} | |
static HandleTimeout(state) { | |
if (state.done) { | |
return; | |
} | |
if (!state.processClosed && state.processExited) { | |
const message = `The STDIO streams did not close within ${state.delay / | |
1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; | |
state._debug(message); | |
} | |
state._setResult(); | |
} | |
} | |
//# sourceMappingURL=toolrunner.js.map | |
/***/ }), | |
/***/ 4087: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.Context = void 0; | |
const fs_1 = __nccwpck_require__(5747); | |
const os_1 = __nccwpck_require__(2087); | |
class Context { | |
/** | |
* Hydrate the context from the environment | |
*/ | |
constructor() { | |
var _a, _b, _c; | |
this.payload = {}; | |
if (process.env.GITHUB_EVENT_PATH) { | |
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { | |
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); | |
} | |
else { | |
const path = process.env.GITHUB_EVENT_PATH; | |
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); | |
} | |
} | |
this.eventName = process.env.GITHUB_EVENT_NAME; | |
this.sha = process.env.GITHUB_SHA; | |
this.ref = process.env.GITHUB_REF; | |
this.workflow = process.env.GITHUB_WORKFLOW; | |
this.action = process.env.GITHUB_ACTION; | |
this.actor = process.env.GITHUB_ACTOR; | |
this.job = process.env.GITHUB_JOB; | |
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); | |
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); | |
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; | |
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; | |
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; | |
} | |
get issue() { | |
const payload = this.payload; | |
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); | |
} | |
get repo() { | |
if (process.env.GITHUB_REPOSITORY) { | |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); | |
return { owner, repo }; | |
} | |
if (this.payload.repository) { | |
return { | |
owner: this.payload.repository.owner.login, | |
repo: this.payload.repository.name | |
}; | |
} | |
throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); | |
} | |
} | |
exports.Context = Context; | |
//# sourceMappingURL=context.js.map | |
/***/ }), | |
/***/ 5438: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.getOctokit = exports.context = void 0; | |
const Context = __importStar(__nccwpck_require__(4087)); | |
const utils_1 = __nccwpck_require__(3030); | |
exports.context = new Context.Context(); | |
/** | |
* Returns a hydrated octokit ready to use for GitHub Actions | |
* | |
* @param token the repo PAT or GITHUB_TOKEN | |
* @param options other options to set | |
*/ | |
function getOctokit(token, options) { | |
return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); | |
} | |
exports.getOctokit = getOctokit; | |
//# sourceMappingURL=github.js.map | |
/***/ }), | |
/***/ 7914: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; | |
const httpClient = __importStar(__nccwpck_require__(9925)); | |
function getAuthString(token, options) { | |
if (!token && !options.auth) { | |
throw new Error('Parameter token or opts.auth is required'); | |
} | |
else if (token && options.auth) { | |
throw new Error('Parameters token and opts.auth may not both be specified'); | |
} | |
return typeof options.auth === 'string' ? options.auth : `token ${token}`; | |
} | |
exports.getAuthString = getAuthString; | |
function getProxyAgent(destinationUrl) { | |
const hc = new httpClient.HttpClient(); | |
return hc.getAgent(destinationUrl); | |
} | |
exports.getProxyAgent = getProxyAgent; | |
function getApiBaseUrl() { | |
return process.env['GITHUB_API_URL'] || 'https://api.github.com'; | |
} | |
exports.getApiBaseUrl = getApiBaseUrl; | |
//# sourceMappingURL=utils.js.map | |
/***/ }), | |
/***/ 3030: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | |
}) : (function(o, m, k, k2) { | |
if (k2 === undefined) k2 = k; | |
o[k2] = m[k]; | |
})); | |
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | |
Object.defineProperty(o, "default", { enumerable: true, value: v }); | |
}) : function(o, v) { | |
o["default"] = v; | |
}); | |
var __importStar = (this && this.__importStar) || function (mod) { | |
if (mod && mod.__esModule) return mod; | |
var result = {}; | |
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | |
__setModuleDefault(result, mod); | |
return result; | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.getOctokitOptions = exports.GitHub = exports.context = void 0; | |
const Context = __importStar(__nccwpck_require__(4087)); | |
const Utils = __importStar(__nccwpck_require__(7914)); | |
// octokit + plugins | |
const core_1 = __nccwpck_require__(6762); | |
const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); | |
const plugin_paginate_rest_1 = __nccwpck_require__(4193); | |
exports.context = new Context.Context(); | |
const baseUrl = Utils.getApiBaseUrl(); | |
const defaults = { | |
baseUrl, | |
request: { | |
agent: Utils.getProxyAgent(baseUrl) | |
} | |
}; | |
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); | |
/** | |
* Convience function to correctly format Octokit Options to pass into the constructor. | |
* | |
* @param token the repo PAT or GITHUB_TOKEN | |
* @param options other options to set | |
*/ | |
function getOctokitOptions(token, options) { | |
const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller | |
// Auth | |
const auth = Utils.getAuthString(token, opts); | |
if (auth) { | |
opts.auth = auth; | |
} | |
return opts; | |
} | |
exports.getOctokitOptions = getOctokitOptions; | |
//# sourceMappingURL=utils.js.map | |
/***/ }), | |
/***/ 9925: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
const http = __nccwpck_require__(8605); | |
const https = __nccwpck_require__(7211); | |
const pm = __nccwpck_require__(6443); | |
let tunnel; | |
var HttpCodes; | |
(function (HttpCodes) { | |
HttpCodes[HttpCodes["OK"] = 200] = "OK"; | |
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; | |
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; | |
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; | |
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; | |
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; | |
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; | |
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; | |
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; | |
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; | |
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; | |
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; | |
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; | |
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; | |
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; | |
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; | |
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; | |
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; | |
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; | |
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; | |
HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; | |
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; | |
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; | |
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; | |
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; | |
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; | |
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; | |
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); | |
var Headers; | |
(function (Headers) { | |
Headers["Accept"] = "accept"; | |
Headers["ContentType"] = "content-type"; | |
})(Headers = exports.Headers || (exports.Headers = {})); | |
var MediaTypes; | |
(function (MediaTypes) { | |
MediaTypes["ApplicationJson"] = "application/json"; | |
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); | |
/** | |
* Returns the proxy URL, depending upon the supplied url and proxy environment variables. | |
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com | |
*/ | |
function getProxyUrl(serverUrl) { | |
let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); | |
return proxyUrl ? proxyUrl.href : ''; | |
} | |
exports.getProxyUrl = getProxyUrl; | |
const HttpRedirectCodes = [ | |
HttpCodes.MovedPermanently, | |
HttpCodes.ResourceMoved, | |
HttpCodes.SeeOther, | |
HttpCodes.TemporaryRedirect, | |
HttpCodes.PermanentRedirect | |
]; | |
const HttpResponseRetryCodes = [ | |
HttpCodes.BadGateway, | |
HttpCodes.ServiceUnavailable, | |
HttpCodes.GatewayTimeout | |
]; | |
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; | |
const ExponentialBackoffCeiling = 10; | |
const ExponentialBackoffTimeSlice = 5; | |
class HttpClientError extends Error { | |
constructor(message, statusCode) { | |
super(message); | |
this.name = 'HttpClientError'; | |
this.statusCode = statusCode; | |
Object.setPrototypeOf(this, HttpClientError.prototype); | |
} | |
} | |
exports.HttpClientError = HttpClientError; | |
class HttpClientResponse { | |
constructor(message) { | |
this.message = message; | |
} | |
readBody() { | |
return new Promise(async (resolve, reject) => { | |
let output = Buffer.alloc(0); | |
this.message.on('data', (chunk) => { | |
output = Buffer.concat([output, chunk]); | |
}); | |
this.message.on('end', () => { | |
resolve(output.toString()); | |
}); | |
}); | |
} | |
} | |
exports.HttpClientResponse = HttpClientResponse; | |
function isHttps(requestUrl) { | |
let parsedUrl = new URL(requestUrl); | |
return parsedUrl.protocol === 'https:'; | |
} | |
exports.isHttps = isHttps; | |
class HttpClient { | |
constructor(userAgent, handlers, requestOptions) { | |
this._ignoreSslError = false; | |
this._allowRedirects = true; | |
this._allowRedirectDowngrade = false; | |
this._maxRedirects = 50; | |
this._allowRetries = false; | |
this._maxRetries = 1; | |
this._keepAlive = false; | |
this._disposed = false; | |
this.userAgent = userAgent; | |
this.handlers = handlers || []; | |
this.requestOptions = requestOptions; | |
if (requestOptions) { | |
if (requestOptions.ignoreSslError != null) { | |
this._ignoreSslError = requestOptions.ignoreSslError; | |
} | |
this._socketTimeout = requestOptions.socketTimeout; | |
if (requestOptions.allowRedirects != null) { | |
this._allowRedirects = requestOptions.allowRedirects; | |
} | |
if (requestOptions.allowRedirectDowngrade != null) { | |
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; | |
} | |
if (requestOptions.maxRedirects != null) { | |
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); | |
} | |
if (requestOptions.keepAlive != null) { | |
this._keepAlive = requestOptions.keepAlive; | |
} | |
if (requestOptions.allowRetries != null) { | |
this._allowRetries = requestOptions.allowRetries; | |
} | |
if (requestOptions.maxRetries != null) { | |
this._maxRetries = requestOptions.maxRetries; | |
} | |
} | |
} | |
options(requestUrl, additionalHeaders) { | |
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); | |
} | |
get(requestUrl, additionalHeaders) { | |
return this.request('GET', requestUrl, null, additionalHeaders || {}); | |
} | |
del(requestUrl, additionalHeaders) { | |
return this.request('DELETE', requestUrl, null, additionalHeaders || {}); | |
} | |
post(requestUrl, data, additionalHeaders) { | |
return this.request('POST', requestUrl, data, additionalHeaders || {}); | |
} | |
patch(requestUrl, data, additionalHeaders) { | |
return this.request('PATCH', requestUrl, data, additionalHeaders || {}); | |
} | |
put(requestUrl, data, additionalHeaders) { | |
return this.request('PUT', requestUrl, data, additionalHeaders || {}); | |
} | |
head(requestUrl, additionalHeaders) { | |
return this.request('HEAD', requestUrl, null, additionalHeaders || {}); | |
} | |
sendStream(verb, requestUrl, stream, additionalHeaders) { | |
return this.request(verb, requestUrl, stream, additionalHeaders); | |
} | |
/** | |
* Gets a typed object from an endpoint | |
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise | |
*/ | |
async getJson(requestUrl, additionalHeaders = {}) { | |
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); | |
let res = await this.get(requestUrl, additionalHeaders); | |
return this._processResponse(res, this.requestOptions); | |
} | |
async postJson(requestUrl, obj, additionalHeaders = {}) { | |
let data = JSON.stringify(obj, null, 2); | |
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); | |
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); | |
let res = await this.post(requestUrl, data, additionalHeaders); | |
return this._processResponse(res, this.requestOptions); | |
} | |
async putJson(requestUrl, obj, additionalHeaders = {}) { | |
let data = JSON.stringify(obj, null, 2); | |
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); | |
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); | |
let res = await this.put(requestUrl, data, additionalHeaders); | |
return this._processResponse(res, this.requestOptions); | |
} | |
async patchJson(requestUrl, obj, additionalHeaders = {}) { | |
let data = JSON.stringify(obj, null, 2); | |
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); | |
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); | |
let res = await this.patch(requestUrl, data, additionalHeaders); | |
return this._processResponse(res, this.requestOptions); | |
} | |
/** | |
* Makes a raw http request. | |
* All other methods such as get, post, patch, and request ultimately call this. | |
* Prefer get, del, post and patch | |
*/ | |
async request(verb, requestUrl, data, headers) { | |
if (this._disposed) { | |
throw new Error('Client has already been disposed.'); | |
} | |
let parsedUrl = new URL(requestUrl); | |
let info = this._prepareRequest(verb, parsedUrl, headers); | |
// Only perform retries on reads since writes may not be idempotent. | |
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 | |
? this._maxRetries + 1 | |
: 1; | |
let numTries = 0; | |
let response; | |
while (numTries < maxTries) { | |
response = await this.requestRaw(info, data); | |
// Check if it's an authentication challenge | |
if (response && | |
response.message && | |
response.message.statusCode === HttpCodes.Unauthorized) { | |
let authenticationHandler; | |
for (let i = 0; i < this.handlers.length; i++) { | |
if (this.handlers[i].canHandleAuthentication(response)) { | |
authenticationHandler = this.handlers[i]; | |
break; | |
} | |
} | |
if (authenticationHandler) { | |
return authenticationHandler.handleAuthentication(this, info, data); | |
} | |
else { | |
// We have received an unauthorized response but have no handlers to handle it. | |
// Let the response return to the caller. | |
return response; | |
} | |
} | |
let redirectsRemaining = this._maxRedirects; | |
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && | |
this._allowRedirects && | |
redirectsRemaining > 0) { | |
const redirectUrl = response.message.headers['location']; | |
if (!redirectUrl) { | |
// if there's no location to redirect to, we won't | |
break; | |
} | |
let parsedRedirectUrl = new URL(redirectUrl); | |
if (parsedUrl.protocol == 'https:' && | |
parsedUrl.protocol != parsedRedirectUrl.protocol && | |
!this._allowRedirectDowngrade) { | |
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); | |
} | |
// we need to finish reading the response before reassigning response | |
// which will leak the open socket. | |
await response.readBody(); | |
// strip authorization header if redirected to a different hostname | |
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { | |
for (let header in headers) { | |
// header names are case insensitive | |
if (header.toLowerCase() === 'authorization') { | |
delete headers[header]; | |
} | |
} | |
} | |
// let's make the request with the new redirectUrl | |
info = this._prepareRequest(verb, parsedRedirectUrl, headers); | |
response = await this.requestRaw(info, data); | |
redirectsRemaining--; | |
} | |
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { | |
// If not a retry code, return immediately instead of retrying | |
return response; | |
} | |
numTries += 1; | |
if (numTries < maxTries) { | |
await response.readBody(); | |
await this._performExponentialBackoff(numTries); | |
} | |
} | |
return response; | |
} | |
/** | |
* Needs to be called if keepAlive is set to true in request options. | |
*/ | |
dispose() { | |
if (this._agent) { | |
this._agent.destroy(); | |
} | |
this._disposed = true; | |
} | |
/** | |
* Raw request. | |
* @param info | |
* @param data | |
*/ | |
requestRaw(info, data) { | |
return new Promise((resolve, reject) => { | |
let callbackForResult = function (err, res) { | |
if (err) { | |
reject(err); | |
} | |
resolve(res); | |
}; | |
this.requestRawWithCallback(info, data, callbackForResult); | |
}); | |
} | |
/** | |
* Raw request with callback. | |
* @param info | |
* @param data | |
* @param onResult | |
*/ | |
requestRawWithCallback(info, data, onResult) { | |
let socket; | |
if (typeof data === 'string') { | |
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); | |
} | |
let callbackCalled = false; | |
let handleResult = (err, res) => { | |
if (!callbackCalled) { | |
callbackCalled = true; | |
onResult(err, res); | |
} | |
}; | |
let req = info.httpModule.request(info.options, (msg) => { | |
let res = new HttpClientResponse(msg); | |
handleResult(null, res); | |
}); | |
req.on('socket', sock => { | |
socket = sock; | |
}); | |
// If we ever get disconnected, we want the socket to timeout eventually | |
req.setTimeout(this._socketTimeout || 3 * 60000, () => { | |
if (socket) { | |
socket.end(); | |
} | |
handleResult(new Error('Request timeout: ' + info.options.path), null); | |
}); | |
req.on('error', function (err) { | |
// err has statusCode property | |
// res should have headers | |
handleResult(err, null); | |
}); | |
if (data && typeof data === 'string') { | |
req.write(data, 'utf8'); | |
} | |
if (data && typeof data !== 'string') { | |
data.on('close', function () { | |
req.end(); | |
}); | |
data.pipe(req); | |
} | |
else { | |
req.end(); | |
} | |
} | |
/** | |
* Gets an http agent. This function is useful when you need an http agent that handles | |
* routing through a proxy server - depending upon the url and proxy environment variables. | |
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com | |
*/ | |
getAgent(serverUrl) { | |
let parsedUrl = new URL(serverUrl); | |
return this._getAgent(parsedUrl); | |
} | |
_prepareRequest(method, requestUrl, headers) { | |
const info = {}; | |
info.parsedUrl = requestUrl; | |
const usingSsl = info.parsedUrl.protocol === 'https:'; | |
info.httpModule = usingSsl ? https : http; | |
const defaultPort = usingSsl ? 443 : 80; | |
info.options = {}; | |
info.options.host = info.parsedUrl.hostname; | |
info.options.port = info.parsedUrl.port | |
? parseInt(info.parsedUrl.port) | |
: defaultPort; | |
info.options.path = | |
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); | |
info.options.method = method; | |
info.options.headers = this._mergeHeaders(headers); | |
if (this.userAgent != null) { | |
info.options.headers['user-agent'] = this.userAgent; | |
} | |
info.options.agent = this._getAgent(info.parsedUrl); | |
// gives handlers an opportunity to participate | |
if (this.handlers) { | |
this.handlers.forEach(handler => { | |
handler.prepareRequest(info.options); | |
}); | |
} | |
return info; | |
} | |
_mergeHeaders(headers) { | |
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); | |
if (this.requestOptions && this.requestOptions.headers) { | |
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); | |
} | |
return lowercaseKeys(headers || {}); | |
} | |
_getExistingOrDefaultHeader(additionalHeaders, header, _default) { | |
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); | |
let clientHeader; | |
if (this.requestOptions && this.requestOptions.headers) { | |
clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; | |
} | |
return additionalHeaders[header] || clientHeader || _default; | |
} | |
_getAgent(parsedUrl) { | |
let agent; | |
let proxyUrl = pm.getProxyUrl(parsedUrl); | |
let useProxy = proxyUrl && proxyUrl.hostname; | |
if (this._keepAlive && useProxy) { | |
agent = this._proxyAgent; | |
} | |
if (this._keepAlive && !useProxy) { | |
agent = this._agent; | |
} | |
// if agent is already assigned use that agent. | |
if (!!agent) { | |
return agent; | |
} | |
const usingSsl = parsedUrl.protocol === 'https:'; | |
let maxSockets = 100; | |
if (!!this.requestOptions) { | |
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; | |
} | |
if (useProxy) { | |
// If using proxy, need tunnel | |
if (!tunnel) { | |
tunnel = __nccwpck_require__(4294); | |
} | |
const agentOptions = { | |
maxSockets: maxSockets, | |
keepAlive: this._keepAlive, | |
proxy: { | |
...((proxyUrl.username || proxyUrl.password) && { | |
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` | |
}), | |
host: proxyUrl.hostname, | |
port: proxyUrl.port | |
} | |
}; | |
let tunnelAgent; | |
const overHttps = proxyUrl.protocol === 'https:'; | |
if (usingSsl) { | |
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; | |
} | |
else { | |
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; | |
} | |
agent = tunnelAgent(agentOptions); | |
this._proxyAgent = agent; | |
} | |
// if reusing agent across request and tunneling agent isn't assigned create a new agent | |
if (this._keepAlive && !agent) { | |
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; | |
agent = usingSsl ? new https.Agent(options) : new http.Agent(options); | |
this._agent = agent; | |
} | |
// if not using private agent and tunnel agent isn't setup then use global agent | |
if (!agent) { | |
agent = usingSsl ? https.globalAgent : http.globalAgent; | |
} | |
if (usingSsl && this._ignoreSslError) { | |
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process | |
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options | |
// we have to cast it to any and change it directly | |
agent.options = Object.assign(agent.options || {}, { | |
rejectUnauthorized: false | |
}); | |
} | |
return agent; | |
} | |
_performExponentialBackoff(retryNumber) { | |
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); | |
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); | |
return new Promise(resolve => setTimeout(() => resolve(), ms)); | |
} | |
static dateTimeDeserializer(key, value) { | |
if (typeof value === 'string') { | |
let a = new Date(value); | |
if (!isNaN(a.valueOf())) { | |
return a; | |
} | |
} | |
return value; | |
} | |
async _processResponse(res, options) { | |
return new Promise(async (resolve, reject) => { | |
const statusCode = res.message.statusCode; | |
const response = { | |
statusCode: statusCode, | |
result: null, | |
headers: {} | |
}; | |
// not found leads to null obj returned | |
if (statusCode == HttpCodes.NotFound) { | |
resolve(response); | |
} | |
let obj; | |
let contents; | |
// get the result from the body | |
try { | |
contents = await res.readBody(); | |
if (contents && contents.length > 0) { | |
if (options && options.deserializeDates) { | |
obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); | |
} | |
else { | |
obj = JSON.parse(contents); | |
} | |
response.result = obj; | |
} | |
response.headers = res.message.headers; | |
} | |
catch (err) { | |
// Invalid resource (contents not json); leaving result obj null | |
} | |
// note that 3xx redirects are handled by the http layer. | |
if (statusCode > 299) { | |
let msg; | |
// if exception/error in body, attempt to get better error | |
if (obj && obj.message) { | |
msg = obj.message; | |
} | |
else if (contents && contents.length > 0) { | |
// it may be the case that the exception is in the body message as string | |
msg = contents; | |
} | |
else { | |
msg = 'Failed request: (' + statusCode + ')'; | |
} | |
let err = new HttpClientError(msg, statusCode); | |
err.result = response.result; | |
reject(err); | |
} | |
else { | |
resolve(response); | |
} | |
}); | |
} | |
} | |
exports.HttpClient = HttpClient; | |
/***/ }), | |
/***/ 6443: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function getProxyUrl(reqUrl) { | |
let usingSsl = reqUrl.protocol === 'https:'; | |
let proxyUrl; | |
if (checkBypass(reqUrl)) { | |
return proxyUrl; | |
} | |
let proxyVar; | |
if (usingSsl) { | |
proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; | |
} | |
else { | |
proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; | |
} | |
if (proxyVar) { | |
proxyUrl = new URL(proxyVar); | |
} | |
return proxyUrl; | |
} | |
exports.getProxyUrl = getProxyUrl; | |
function checkBypass(reqUrl) { | |
if (!reqUrl.hostname) { | |
return false; | |
} | |
let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; | |
if (!noProxy) { | |
return false; | |
} | |
// Determine the request port | |
let reqPort; | |
if (reqUrl.port) { | |
reqPort = Number(reqUrl.port); | |
} | |
else if (reqUrl.protocol === 'http:') { | |
reqPort = 80; | |
} | |
else if (reqUrl.protocol === 'https:') { | |
reqPort = 443; | |
} | |
// Format the request hostname and hostname with port | |
let upperReqHosts = [reqUrl.hostname.toUpperCase()]; | |
if (typeof reqPort === 'number') { | |
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); | |
} | |
// Compare request host against noproxy | |
for (let upperNoProxyItem of noProxy | |
.split(',') | |
.map(x => x.trim().toUpperCase()) | |
.filter(x => x)) { | |
if (upperReqHosts.some(x => x === upperNoProxyItem)) { | |
return true; | |
} | |
} | |
return false; | |
} | |
exports.checkBypass = checkBypass; | |
/***/ }), | |
/***/ 1962: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
var _a; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
const assert_1 = __nccwpck_require__(2357); | |
const fs = __nccwpck_require__(5747); | |
const path = __nccwpck_require__(5622); | |
_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; | |
exports.IS_WINDOWS = process.platform === 'win32'; | |
function exists(fsPath) { | |
return __awaiter(this, void 0, void 0, function* () { | |
try { | |
yield exports.stat(fsPath); | |
} | |
catch (err) { | |
if (err.code === 'ENOENT') { | |
return false; | |
} | |
throw err; | |
} | |
return true; | |
}); | |
} | |
exports.exists = exists; | |
function isDirectory(fsPath, useStat = false) { | |
return __awaiter(this, void 0, void 0, function* () { | |
const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); | |
return stats.isDirectory(); | |
}); | |
} | |
exports.isDirectory = isDirectory; | |
/** | |
* On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: | |
* \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). | |
*/ | |
function isRooted(p) { | |
p = normalizeSeparators(p); | |
if (!p) { | |
throw new Error('isRooted() parameter "p" cannot be empty'); | |
} | |
if (exports.IS_WINDOWS) { | |
return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello | |
); // e.g. C: or C:\hello | |
} | |
return p.startsWith('/'); | |
} | |
exports.isRooted = isRooted; | |
/** | |
* Recursively create a directory at `fsPath`. | |
* | |
* This implementation is optimistic, meaning it attempts to create the full | |
* path first, and backs up the path stack from there. | |
* | |
* @param fsPath The path to create | |
* @param maxDepth The maximum recursion depth | |
* @param depth The current recursion depth | |
*/ | |
function mkdirP(fsPath, maxDepth = 1000, depth = 1) { | |
return __awaiter(this, void 0, void 0, function* () { | |
assert_1.ok(fsPath, 'a path argument must be provided'); | |
fsPath = path.resolve(fsPath); | |
if (depth >= maxDepth) | |
return exports.mkdir(fsPath); | |
try { | |
yield exports.mkdir(fsPath); | |
return; | |
} | |
catch (err) { | |
switch (err.code) { | |
case 'ENOENT': { | |
yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); | |
yield exports.mkdir(fsPath); | |
return; | |
} | |
default: { | |
let stats; | |
try { | |
stats = yield exports.stat(fsPath); | |
} | |
catch (err2) { | |
throw err; | |
} | |
if (!stats.isDirectory()) | |
throw err; | |
} | |
} | |
} | |
}); | |
} | |
exports.mkdirP = mkdirP; | |
/** | |
* Best effort attempt to determine whether a file exists and is executable. | |
* @param filePath file path to check | |
* @param extensions additional file extensions to try | |
* @return if file exists and is executable, returns the file path. otherwise empty string. | |
*/ | |
function tryGetExecutablePath(filePath, extensions) { | |
return __awaiter(this, void 0, void 0, function* () { | |
let stats = undefined; | |
try { | |
// test file exists | |
stats = yield exports.stat(filePath); | |
} | |
catch (err) { | |
if (err.code !== 'ENOENT') { | |
// eslint-disable-next-line no-console | |
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); | |
} | |
} | |
if (stats && stats.isFile()) { | |
if (exports.IS_WINDOWS) { | |
// on Windows, test for valid extension | |
const upperExt = path.extname(filePath).toUpperCase(); | |
if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { | |
return filePath; | |
} | |
} | |
else { | |
if (isUnixExecutable(stats)) { | |
return filePath; | |
} | |
} | |
} | |
// try each extension | |
const originalFilePath = filePath; | |
for (const extension of extensions) { | |
filePath = originalFilePath + extension; | |
stats = undefined; | |
try { | |
stats = yield exports.stat(filePath); | |
} | |
catch (err) { | |
if (err.code !== 'ENOENT') { | |
// eslint-disable-next-line no-console | |
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); | |
} | |
} | |
if (stats && stats.isFile()) { | |
if (exports.IS_WINDOWS) { | |
// preserve the case of the actual file (since an extension was appended) | |
try { | |
const directory = path.dirname(filePath); | |
const upperName = path.basename(filePath).toUpperCase(); | |
for (const actualName of yield exports.readdir(directory)) { | |
if (upperName === actualName.toUpperCase()) { | |
filePath = path.join(directory, actualName); | |
break; | |
} | |
} | |
} | |
catch (err) { | |
// eslint-disable-next-line no-console | |
console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); | |
} | |
return filePath; | |
} | |
else { | |
if (isUnixExecutable(stats)) { | |
return filePath; | |
} | |
} | |
} | |
} | |
return ''; | |
}); | |
} | |
exports.tryGetExecutablePath = tryGetExecutablePath; | |
function normalizeSeparators(p) { | |
p = p || ''; | |
if (exports.IS_WINDOWS) { | |
// convert slashes on Windows | |
p = p.replace(/\//g, '\\'); | |
// remove redundant slashes | |
return p.replace(/\\\\+/g, '\\'); | |
} | |
// remove redundant slashes | |
return p.replace(/\/\/+/g, '/'); | |
} | |
// on Mac/Linux, test the execute bit | |
// R W X R W X R W X | |
// 256 128 64 32 16 8 4 2 1 | |
function isUnixExecutable(stats) { | |
return ((stats.mode & 1) > 0 || | |
((stats.mode & 8) > 0 && stats.gid === process.getgid()) || | |
((stats.mode & 64) > 0 && stats.uid === process.getuid())); | |
} | |
//# sourceMappingURL=io-util.js.map | |
/***/ }), | |
/***/ 7351: | |
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { | |
"use strict"; | |
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | |
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | |
return new (P || (P = Promise))(function (resolve, reject) { | |
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | |
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | |
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | |
step((generator = generator.apply(thisArg, _arguments || [])).next()); | |
}); | |
}; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
const childProcess = __nccwpck_require__(3129); | |
const path = __nccwpck_require__(5622); | |
const util_1 = __nccwpck_require__(1669); | |
const ioUtil = __nccwpck_require__(1962); | |
const exec = util_1.promisify(childProcess.exec); | |
/** | |
* Copies a file or folder. | |
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js | |
* | |
* @param source source path | |
* @param dest destination path | |
* @param options optional. See CopyOptions. | |
*/ | |
function cp(source, dest, options = {}) { | |
return __awaiter(this, void 0, void 0, function* () { | |
const { force, recursive } = readCopyOptions(options); | |
const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; | |
// Dest is an existing file, but not forcing | |
if (destStat && destStat.isFile() && !force) { | |
return; | |
} | |
// If dest is an existing directory, should copy inside. | |
const newDest = destStat && destStat.isDirectory() | |
? path.join(dest, path.basename(source)) | |
: dest; | |
if (!(yield ioUtil.exists(source))) { | |
throw new Error(`no such file or directory: ${source}`); | |
} | |
const sourceStat = yield ioUtil.stat(source); | |
if (sourceStat.isDirectory()) { | |
if (!recursive) { | |
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); | |
} | |
else { | |
yield cpDirRecursive(source, newDest, 0, force); | |
} | |
} | |
else { | |
if (path.relative(source, newDest) === '') { | |
// a file cannot be copied to itself | |
throw new Error(`'${newDest}' and '${source}' are the same file`); | |
} | |
yield copyFile(source, newDest, force); | |
} | |
}); | |
} | |
exports.cp = cp; | |
/** | |
* Moves a path. | |
* | |
* @param source source path | |
* @param dest destination path | |
* @param options optional. See MoveOptions. | |
*/ | |
function mv(source, dest, options = {}) { | |
return __awaiter(this, void 0, void 0, function* () { | |
if (yield ioUtil.exists(dest)) { | |
let destExists = true; | |
if (yield ioUtil.isDirectory(dest)) { | |
// If dest is directory copy src into dest | |
dest = path.join(dest, path.basename(source)); | |
destExists = yield ioUtil.exists(dest); | |
} | |
if (destExists) { | |
if (options.force == null || options.force) { | |
yield rmRF(dest); | |
} | |
else { | |
throw new Error('Destination already exists'); | |
} | |
} | |
} | |
yield mkdirP(path.dirname(dest)); | |
yield ioUtil.rename(source, dest); | |
}); | |
} | |
exports.mv = mv; | |
/** | |
* Remove a path recursively with force | |
* | |
* @param inputPath path to remove | |
*/ | |
function rmRF(inputPath) { | |
return __awaiter(this, void 0, void 0, function* () { | |
if (ioUtil.IS_WINDOWS) { | |
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another | |
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. | |
try { | |
if (yield ioUtil.isDirectory(inputPath, true)) { | |
yield exec(`rd /s /q "${inputPath}"`); | |
} | |
else { | |
yield exec(`del /f /a "${inputPath}"`); | |
} | |
} | |
catch (err) { | |
// if you try to delete a file that doesn't exist, desired result is achieved | |
// other errors are valid | |
if (err.code !== 'ENOENT') | |
throw err; | |
} | |
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that | |
try { | |
yield ioUtil.unlink(inputPath); | |
} | |
catch (err) { | |
// if you try to delete a file that doesn't exist, desired result is achieved | |
// other errors are valid | |
if (err.code !== 'ENOENT') | |
throw err; | |
} | |
} | |
else { | |
let isDir = false; | |
try { | |
isDir = yield ioUtil.isDirectory(inputPath); | |
} | |
catch (err) { | |
// if you try to delete a file that doesn't exist, desired result is achieved | |
// other errors are valid | |
if (err.code !== 'ENOENT') | |
throw err; | |
return; | |
} | |
if (isDir) { | |
yield exec(`rm -rf "${inputPath}"`); | |
} | |
else { | |
yield ioUtil.unlink(inputPath); | |
} | |
} | |
}); | |
} | |
exports.rmRF = rmRF; | |
/** | |
* Make a directory. Creates the full path with folders in between | |
* Will throw if it fails | |
* | |
* @param fsPath path to create | |
* @returns Promise<void> | |
*/ | |
function mkdirP(fsPath) { | |
return __awaiter(this, void 0, void 0, function* () { | |
yield ioUtil.mkdirP(fsPath); | |
}); | |
} | |
exports.mkdirP = mkdirP; | |
/** | |
* Returns path of a tool had the tool actually been invoked. Resolves via paths. | |
* If you check and the tool does not exist, it will throw. | |
* | |
* @param tool name of the tool | |
* @param check whether to check if tool exists | |
* @returns Promise<string> path to tool | |
*/ | |
function which(tool, check) { | |
return __awaiter(this, void 0, void 0, function* () { | |
if (!tool) { | |
throw new Error("parameter 'tool' is required"); | |
} | |
// recursive when check=true | |
if (check) { | |
const result = yield which(tool, false); | |
if (!result) { | |
if (ioUtil.IS_WINDOWS) { | |
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); | |
} | |
else { | |
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); | |
} | |
} | |
} | |
try { | |
// build the list of extensions to try | |
const extensions = []; | |
if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { | |
for (const extension of process.env.PATHEXT.split(path.delimiter)) { | |
if (extension) { | |
extensions.push(extension); | |
} | |
} | |
} | |
// if it's rooted, return it if exists. otherwise return empty. | |
if (ioUtil.isRooted(tool)) { | |
const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); | |
if (filePath) { | |
return filePath; | |
} | |
return ''; | |
} | |
// if any path separators, return empty | |
if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { | |
return ''; | |
} | |
// build the list of directories | |
// | |
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective, | |
// it feels like we should not do this. Checking the current directory seems like more of a use | |
// case of a shell, and the which() function exposed by the toolkit should strive for consistency | |
// across platforms. | |
const directories = []; | |
if (process.env.PATH) { | |
for (const p of process.env.PATH.split(path.delimiter)) { | |
if (p) { | |
directories.push(p); | |
} | |
} | |
} | |
// return the first match | |
for (const directory of directories) { | |
const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); | |
if (filePath) { | |
return filePath; | |
} | |
} | |
return ''; | |
} | |
catch (err) { | |
throw new Error(`which failed with message ${err.message}`); | |
} | |
}); | |
} | |
exports.which = which; | |
function readCopyOptions(options) { | |
const force = options.force == null ? true : options.force; | |
const recursive = Boolean(options.recursive); | |
return { force, recursive }; | |
} | |
function cpDirRecursive(sourceDir, destDir, currentDepth, force) { | |
return __awaiter(this, void 0, void 0, function* () { | |
// Ensure there is not a run away recursive copy | |
if (currentDepth >= 255) | |
return; | |
currentDepth++; | |
yield mkdirP(destDir); | |
const files = yield ioUtil.readdir(sourceDir); | |
for (const fileName of files) { | |
const srcFile = `${sourceDir}/${fileName}`; | |
const destFile = `${destDir}/${fileName}`; | |
const srcFileStat = yield ioUtil.lstat(srcFile); | |
if (srcFileStat.isDirectory()) { | |
// Recurse | |
yield cpDirRecursive(srcFile, destFile, currentDepth, force); | |
} | |
else { | |
yield copyFile(srcFile, destFile, force); | |
} | |
} | |
// Change the mode for the newly created directory | |
yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); | |
}); | |
} | |
// Buffered file copy | |
function copyFile(srcFile, destFile, force) { | |
return __awaiter(this, void 0, void 0, function* () { | |
if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { | |
// unlink/re-link it | |
try { | |
yield ioUtil.lstat(destFile); | |
yield ioUtil.unlink(destFile); | |
} | |
catch (e) { | |
// Try to override file permission | |
if (e.code === 'EPERM') { | |
yield ioUtil.chmod(destFile, '0666'); | |
yield ioUtil.unlink(destFile); | |
} | |
// other errors = it doesn't exist, no work to do | |
} | |
// Copy over symlink | |
const symlinkFull = yield ioUtil.readlink(srcFile); | |
yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); | |
} | |
else if (!(yield ioUtil.exists(destFile)) || force) { | |
yield ioUtil.copyFile(srcFile, destFile); | |
} | |
}); | |
} | |
//# sourceMappingURL=io.js.map | |
/***/ }), | |
/***/ 334: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
async function auth(token) { | |
const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; | |
return { | |
type: "token", | |
token: token, | |
tokenType | |
}; | |
} | |
/** | |
* Prefix token for usage in the Authorization header | |
* | |
* @param token OAuth token or JSON Web Token | |
*/ | |
function withAuthorizationPrefix(token) { | |
if (token.split(/\./).length === 3) { | |
return `bearer ${token}`; | |
} | |
return `token ${token}`; | |
} | |
async function hook(token, request, route, parameters) { | |
const endpoint = request.endpoint.merge(route, parameters); | |
endpoint.headers.authorization = withAuthorizationPrefix(token); | |
return request(endpoint); | |
} | |
const createTokenAuth = function createTokenAuth(token) { | |
if (!token) { | |
throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); | |
} | |
if (typeof token !== "string") { | |
throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); | |
} | |
token = token.replace(/^(token|bearer) +/i, ""); | |
return Object.assign(auth.bind(null, token), { | |
hook: hook.bind(null, token) | |
}); | |
}; | |
exports.createTokenAuth = createTokenAuth; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 6762: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
var universalUserAgent = __nccwpck_require__(5030); | |
var beforeAfterHook = __nccwpck_require__(3682); | |
var request = __nccwpck_require__(6234); | |
var graphql = __nccwpck_require__(8467); | |
var authToken = __nccwpck_require__(334); | |
function _objectWithoutPropertiesLoose(source, excluded) { | |
if (source == null) return {}; | |
var target = {}; | |
var sourceKeys = Object.keys(source); | |
var key, i; | |
for (i = 0; i < sourceKeys.length; i++) { | |
key = sourceKeys[i]; | |
if (excluded.indexOf(key) >= 0) continue; | |
target[key] = source[key]; | |
} | |
return target; | |
} | |
function _objectWithoutProperties(source, excluded) { | |
if (source == null) return {}; | |
var target = _objectWithoutPropertiesLoose(source, excluded); | |
var key, i; | |
if (Object.getOwnPropertySymbols) { | |
var sourceSymbolKeys = Object.getOwnPropertySymbols(source); | |
for (i = 0; i < sourceSymbolKeys.length; i++) { | |
key = sourceSymbolKeys[i]; | |
if (excluded.indexOf(key) >= 0) continue; | |
if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; | |
target[key] = source[key]; | |
} | |
} | |
return target; | |
} | |
const VERSION = "3.4.0"; | |
class Octokit { | |
constructor(options = {}) { | |
const hook = new beforeAfterHook.Collection(); | |
const requestDefaults = { | |
baseUrl: request.request.endpoint.DEFAULTS.baseUrl, | |
headers: {}, | |
request: Object.assign({}, options.request, { | |
// @ts-ignore internal usage only, no need to type | |
hook: hook.bind(null, "request") | |
}), | |
mediaType: { | |
previews: [], | |
format: "" | |
} | |
}; // prepend default user agent with `options.userAgent` if set | |
requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); | |
if (options.baseUrl) { | |
requestDefaults.baseUrl = options.baseUrl; | |
} | |
if (options.previews) { | |
requestDefaults.mediaType.previews = options.previews; | |
} | |
if (options.timeZone) { | |
requestDefaults.headers["time-zone"] = options.timeZone; | |
} | |
this.request = request.request.defaults(requestDefaults); | |
this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); | |
this.log = Object.assign({ | |
debug: () => {}, | |
info: () => {}, | |
warn: console.warn.bind(console), | |
error: console.error.bind(console) | |
}, options.log); | |
this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance | |
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. | |
// (2) If only `options.auth` is set, use the default token authentication strategy. | |
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. | |
// TODO: type `options.auth` based on `options.authStrategy`. | |
if (!options.authStrategy) { | |
if (!options.auth) { | |
// (1) | |
this.auth = async () => ({ | |
type: "unauthenticated" | |
}); | |
} else { | |
// (2) | |
const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ | |
hook.wrap("request", auth.hook); | |
this.auth = auth; | |
} | |
} else { | |
const { | |
authStrategy | |
} = options, | |
otherOptions = _objectWithoutProperties(options, ["authStrategy"]); | |
const auth = authStrategy(Object.assign({ | |
request: this.request, | |
log: this.log, | |
// we pass the current octokit instance as well as its constructor options | |
// to allow for authentication strategies that return a new octokit instance | |
// that shares the same internal state as the current one. The original | |
// requirement for this was the "event-octokit" authentication strategy | |
// of https://github.com/probot/octokit-auth-probot. | |
octokit: this, | |
octokitOptions: otherOptions | |
}, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ | |
hook.wrap("request", auth.hook); | |
this.auth = auth; | |
} // apply plugins | |
// https://stackoverflow.com/a/16345172 | |
const classConstructor = this.constructor; | |
classConstructor.plugins.forEach(plugin => { | |
Object.assign(this, plugin(this, options)); | |
}); | |
} | |
static defaults(defaults) { | |
const OctokitWithDefaults = class extends this { | |
constructor(...args) { | |
const options = args[0] || {}; | |
if (typeof defaults === "function") { | |
super(defaults(options)); | |
return; | |
} | |
super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { | |
userAgent: `${options.userAgent} ${defaults.userAgent}` | |
} : null)); | |
} | |
}; | |
return OctokitWithDefaults; | |
} | |
/** | |
* Attach a plugin (or many) to your Octokit instance. | |
* | |
* @example | |
* const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) | |
*/ | |
static plugin(...newPlugins) { | |
var _a; | |
const currentPlugins = this.plugins; | |
const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); | |
return NewOctokit; | |
} | |
} | |
Octokit.VERSION = VERSION; | |
Octokit.plugins = []; | |
exports.Octokit = Octokit; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 9440: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } | |
var isPlainObject = _interopDefault(__nccwpck_require__(4038)); | |
var universalUserAgent = __nccwpck_require__(5030); | |
function lowercaseKeys(object) { | |
if (!object) { | |
return {}; | |
} | |
return Object.keys(object).reduce((newObj, key) => { | |
newObj[key.toLowerCase()] = object[key]; | |
return newObj; | |
}, {}); | |
} | |
function mergeDeep(defaults, options) { | |
const result = Object.assign({}, defaults); | |
Object.keys(options).forEach(key => { | |
if (isPlainObject(options[key])) { | |
if (!(key in defaults)) Object.assign(result, { | |
[key]: options[key] | |
});else result[key] = mergeDeep(defaults[key], options[key]); | |
} else { | |
Object.assign(result, { | |
[key]: options[key] | |
}); | |
} | |
}); | |
return result; | |
} | |
function merge(defaults, route, options) { | |
if (typeof route === "string") { | |
let [method, url] = route.split(" "); | |
options = Object.assign(url ? { | |
method, | |
url | |
} : { | |
url: method | |
}, options); | |
} else { | |
options = Object.assign({}, route); | |
} // lowercase header names before merging with defaults to avoid duplicates | |
options.headers = lowercaseKeys(options.headers); | |
const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten | |
if (defaults && defaults.mediaType.previews.length) { | |
mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); | |
} | |
mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); | |
return mergedOptions; | |
} | |
function addQueryParameters(url, parameters) { | |
const separator = /\?/.test(url) ? "&" : "?"; | |
const names = Object.keys(parameters); | |
if (names.length === 0) { | |
return url; | |
} | |
return url + separator + names.map(name => { | |
if (name === "q") { | |
return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); | |
} | |
return `${name}=${encodeURIComponent(parameters[name])}`; | |
}).join("&"); | |
} | |
const urlVariableRegex = /\{[^}]+\}/g; | |
function removeNonChars(variableName) { | |
return variableName.replace(/^\W+|\W+$/g, "").split(/,/); | |
} | |
function extractUrlVariableNames(url) { | |
const matches = url.match(urlVariableRegex); | |
if (!matches) { | |
return []; | |
} | |
return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); | |
} | |
function omit(object, keysToOmit) { | |
return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { | |
obj[key] = object[key]; | |
return obj; | |
}, {}); | |
} | |
// Based on https://github.com/bramstein/url-template, licensed under BSD | |
// TODO: create separate package. | |
// | |
// Copyright (c) 2012-2014, Bram Stein | |
// All rights reserved. | |
// Redistribution and use in source and binary forms, with or without | |
// modification, are permitted provided that the following conditions | |
// are met: | |
// 1. Redistributions of source code must retain the above copyright | |
// notice, this list of conditions and the following disclaimer. | |
// 2. Redistributions in binary form must reproduce the above copyright | |
// notice, this list of conditions and the following disclaimer in the | |
// documentation and/or other materials provided with the distribution. | |
// 3. The name of the author may not be used to endorse or promote products | |
// derived from this software without specific prior written permission. | |
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED | |
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | |
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | |
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, | |
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, | |
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY | |
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING | |
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, | |
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
/* istanbul ignore file */ | |
function encodeReserved(str) { | |
return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { | |
if (!/%[0-9A-Fa-f]/.test(part)) { | |
part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); | |
} | |
return part; | |
}).join(""); | |
} | |
function encodeUnreserved(str) { | |
return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { | |
return "%" + c.charCodeAt(0).toString(16).toUpperCase(); | |
}); | |
} | |
function encodeValue(operator, value, key) { | |
value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); | |
if (key) { | |
return encodeUnreserved(key) + "=" + value; | |
} else { | |
return value; | |
} | |
} | |
function isDefined(value) { | |
return value !== undefined && value !== null; | |
} | |
function isKeyOperator(operator) { | |
return operator === ";" || operator === "&" || operator === "?"; | |
} | |
function getValues(context, operator, key, modifier) { | |
var value = context[key], | |
result = []; | |
if (isDefined(value) && value !== "") { | |
if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { | |
value = value.toString(); | |
if (modifier && modifier !== "*") { | |
value = value.substring(0, parseInt(modifier, 10)); | |
} | |
result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); | |
} else { | |
if (modifier === "*") { | |
if (Array.isArray(value)) { | |
value.filter(isDefined).forEach(function (value) { | |
result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); | |
}); | |
} else { | |
Object.keys(value).forEach(function (k) { | |
if (isDefined(value[k])) { | |
result.push(encodeValue(operator, value[k], k)); | |
} | |
}); | |
} | |
} else { | |
const tmp = []; | |
if (Array.isArray(value)) { | |
value.filter(isDefined).forEach(function (value) { | |
tmp.push(encodeValue(operator, value)); | |
}); | |
} else { | |
Object.keys(value).forEach(function (k) { | |
if (isDefined(value[k])) { | |
tmp.push(encodeUnreserved(k)); | |
tmp.push(encodeValue(operator, value[k].toString())); | |
} | |
}); | |
} | |
if (isKeyOperator(operator)) { | |
result.push(encodeUnreserved(key) + "=" + tmp.join(",")); | |
} else if (tmp.length !== 0) { | |
result.push(tmp.join(",")); | |
} | |
} | |
} | |
} else { | |
if (operator === ";") { | |
if (isDefined(value)) { | |
result.push(encodeUnreserved(key)); | |
} | |
} else if (value === "" && (operator === "&" || operator === "?")) { | |
result.push(encodeUnreserved(key) + "="); | |
} else if (value === "") { | |
result.push(""); | |
} | |
} | |
return result; | |
} | |
function parseUrl(template) { | |
return { | |
expand: expand.bind(null, template) | |
}; | |
} | |
function expand(template, context) { | |
var operators = ["+", "#", ".", "/", ";", "?", "&"]; | |
return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { | |
if (expression) { | |
let operator = ""; | |
const values = []; | |
if (operators.indexOf(expression.charAt(0)) !== -1) { | |
operator = expression.charAt(0); | |
expression = expression.substr(1); | |
} | |
expression.split(/,/g).forEach(function (variable) { | |
var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); | |
values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); | |
}); | |
if (operator && operator !== "+") { | |
var separator = ","; | |
if (operator === "?") { | |
separator = "&"; | |
} else if (operator !== "#") { | |
separator = operator; | |
} | |
return (values.length !== 0 ? operator : "") + values.join(separator); | |
} else { | |
return values.join(","); | |
} | |
} else { | |
return encodeReserved(literal); | |
} | |
}); | |
} | |
function parse(options) { | |
// https://fetch.spec.whatwg.org/#methods | |
let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible | |
let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); | |
let headers = Object.assign({}, options.headers); | |
let body; | |
let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later | |
const urlVariableNames = extractUrlVariableNames(url); | |
url = parseUrl(url).expand(parameters); | |
if (!/^http/.test(url)) { | |
url = options.baseUrl + url; | |
} | |
const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); | |
const remainingParameters = omit(parameters, omittedParameters); | |
const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); | |
if (!isBinaryRequset) { | |
if (options.mediaType.format) { | |
// e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw | |
headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); | |
} | |
if (options.mediaType.previews.length) { | |
const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; | |
headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { | |
const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; | |
return `application/vnd.github.${preview}-preview${format}`; | |
}).join(","); | |
} | |
} // for GET/HEAD requests, set URL query parameters from remaining parameters | |
// for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters | |
if (["GET", "HEAD"].includes(method)) { | |
url = addQueryParameters(url, remainingParameters); | |
} else { | |
if ("data" in remainingParameters) { | |
body = remainingParameters.data; | |
} else { | |
if (Object.keys(remainingParameters).length) { | |
body = remainingParameters; | |
} else { | |
headers["content-length"] = 0; | |
} | |
} | |
} // default content-type for JSON if body is set | |
if (!headers["content-type"] && typeof body !== "undefined") { | |
headers["content-type"] = "application/json; charset=utf-8"; | |
} // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. | |
// fetch does not allow to set `content-length` header, but we can set body to an empty string | |
if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { | |
body = ""; | |
} // Only return body/request keys if present | |
return Object.assign({ | |
method, | |
url, | |
headers | |
}, typeof body !== "undefined" ? { | |
body | |
} : null, options.request ? { | |
request: options.request | |
} : null); | |
} | |
function endpointWithDefaults(defaults, route, options) { | |
return parse(merge(defaults, route, options)); | |
} | |
function withDefaults(oldDefaults, newDefaults) { | |
const DEFAULTS = merge(oldDefaults, newDefaults); | |
const endpoint = endpointWithDefaults.bind(null, DEFAULTS); | |
return Object.assign(endpoint, { | |
DEFAULTS, | |
defaults: withDefaults.bind(null, DEFAULTS), | |
merge: merge.bind(null, DEFAULTS), | |
parse | |
}); | |
} | |
const VERSION = "6.0.5"; | |
const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. | |
// So we use RequestParameters and add method as additional required property. | |
const DEFAULTS = { | |
method: "GET", | |
baseUrl: "https://api.github.com", | |
headers: { | |
accept: "application/vnd.github.v3+json", | |
"user-agent": userAgent | |
}, | |
mediaType: { | |
format: "", | |
previews: [] | |
} | |
}; | |
const endpoint = withDefaults(null, DEFAULTS); | |
exports.endpoint = endpoint; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 4038: | |
/***/ ((module) => { | |
"use strict"; | |
/*! | |
* is-plain-object <https://github.com/jonschlinkert/is-plain-object> | |
* | |
* Copyright (c) 2014-2017, Jon Schlinkert. | |
* Released under the MIT License. | |
*/ | |
function isObject(o) { | |
return Object.prototype.toString.call(o) === '[object Object]'; | |
} | |
function isPlainObject(o) { | |
var ctor,prot; | |
if (isObject(o) === false) return false; | |
// If has modified constructor | |
ctor = o.constructor; | |
if (ctor === undefined) return true; | |
// If has modified prototype | |
prot = ctor.prototype; | |
if (isObject(prot) === false) return false; | |
// If constructor does not have an Object-specific method | |
if (prot.hasOwnProperty('isPrototypeOf') === false) { | |
return false; | |
} | |
// Most likely a plain Object | |
return true; | |
} | |
module.exports = isPlainObject; | |
/***/ }), | |
/***/ 8467: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
var request = __nccwpck_require__(3758); | |
var universalUserAgent = __nccwpck_require__(5030); | |
const VERSION = "4.6.1"; | |
class GraphqlError extends Error { | |
constructor(request, response) { | |
const message = response.data.errors[0].message; | |
super(message); | |
Object.assign(this, response.data); | |
Object.assign(this, { | |
headers: response.headers | |
}); | |
this.name = "GraphqlError"; | |
this.request = request; // Maintains proper stack trace (only available on V8) | |
/* istanbul ignore next */ | |
if (Error.captureStackTrace) { | |
Error.captureStackTrace(this, this.constructor); | |
} | |
} | |
} | |
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; | |
const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; | |
const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; | |
function graphql(request, query, options) { | |
if (options) { | |
if (typeof query === "string" && "query" in options) { | |
return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); | |
} | |
for (const key in options) { | |
if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; | |
return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); | |
} | |
} | |
const parsedOptions = typeof query === "string" ? Object.assign({ | |
query | |
}, options) : query; | |
const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { | |
if (NON_VARIABLE_OPTIONS.includes(key)) { | |
result[key] = parsedOptions[key]; | |
return result; | |
} | |
if (!result.variables) { | |
result.variables = {}; | |
} | |
result.variables[key] = parsedOptions[key]; | |
return result; | |
}, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix | |
// https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 | |
const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; | |
if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { | |
requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); | |
} | |
return request(requestOptions).then(response => { | |
if (response.data.errors) { | |
const headers = {}; | |
for (const key of Object.keys(response.headers)) { | |
headers[key] = response.headers[key]; | |
} | |
throw new GraphqlError(requestOptions, { | |
headers, | |
data: response.data | |
}); | |
} | |
return response.data.data; | |
}); | |
} | |
function withDefaults(request$1, newDefaults) { | |
const newRequest = request$1.defaults(newDefaults); | |
const newApi = (query, options) => { | |
return graphql(newRequest, query, options); | |
}; | |
return Object.assign(newApi, { | |
defaults: withDefaults.bind(null, newRequest), | |
endpoint: request.request.endpoint | |
}); | |
} | |
const graphql$1 = withDefaults(request.request, { | |
headers: { | |
"user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` | |
}, | |
method: "POST", | |
url: "/graphql" | |
}); | |
function withCustomRequest(customRequest) { | |
return withDefaults(customRequest, { | |
method: "POST", | |
url: "/graphql" | |
}); | |
} | |
exports.graphql = graphql$1; | |
exports.withCustomRequest = withCustomRequest; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 3758: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } | |
var endpoint = __nccwpck_require__(9440); | |
var universalUserAgent = __nccwpck_require__(5030); | |
var isPlainObject = _interopDefault(__nccwpck_require__(8034)); | |
var nodeFetch = _interopDefault(__nccwpck_require__(467)); | |
var requestError = __nccwpck_require__(537); | |
const VERSION = "5.4.7"; | |
function getBufferResponse(response) { | |
return response.arrayBuffer(); | |
} | |
function fetchWrapper(requestOptions) { | |
if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { | |
requestOptions.body = JSON.stringify(requestOptions.body); | |
} | |
let headers = {}; | |
let status; | |
let url; | |
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; | |
return fetch(requestOptions.url, Object.assign({ | |
method: requestOptions.method, | |
body: requestOptions.body, | |
headers: requestOptions.headers, | |
redirect: requestOptions.redirect | |
}, requestOptions.request)).then(response => { | |
url = response.url; | |
status = response.status; | |
for (const keyAndValue of response.headers) { | |
headers[keyAndValue[0]] = keyAndValue[1]; | |
} | |
if (status === 204 || status === 205) { | |
return; | |
} // GitHub API returns 200 for HEAD requests | |
if (requestOptions.method === "HEAD") { | |
if (status < 400) { | |
return; | |
} | |
throw new requestError.RequestError(response.statusText, status, { | |
headers, | |
request: requestOptions | |
}); | |
} | |
if (status === 304) { | |
throw new requestError.RequestError("Not modified", status, { | |
headers, | |
request: requestOptions | |
}); | |
} | |
if (status >= 400) { | |
return response.text().then(message => { | |
const error = new requestError.RequestError(message, status, { | |
headers, | |
request: requestOptions | |
}); | |
try { | |
let responseBody = JSON.parse(error.message); | |
Object.assign(error, responseBody); | |
let errors = responseBody.errors; // Assumption `errors` would always be in Array format | |
error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); | |
} catch (e) {// ignore, see octokit/rest.js#684 | |
} | |
throw error; | |
}); | |
} | |
const contentType = response.headers.get("content-type"); | |
if (/application\/json/.test(contentType)) { | |
return response.json(); | |
} | |
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { | |
return response.text(); | |
} | |
return getBufferResponse(response); | |
}).then(data => { | |
return { | |
status, | |
url, | |
headers, | |
data | |
}; | |
}).catch(error => { | |
if (error instanceof requestError.RequestError) { | |
throw error; | |
} | |
throw new requestError.RequestError(error.message, 500, { | |
headers, | |
request: requestOptions | |
}); | |
}); | |
} | |
function withDefaults(oldEndpoint, newDefaults) { | |
const endpoint = oldEndpoint.defaults(newDefaults); | |
const newApi = function (route, parameters) { | |
const endpointOptions = endpoint.merge(route, parameters); | |
if (!endpointOptions.request || !endpointOptions.request.hook) { | |
return fetchWrapper(endpoint.parse(endpointOptions)); | |
} | |
const request = (route, parameters) => { | |
return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); | |
}; | |
Object.assign(request, { | |
endpoint, | |
defaults: withDefaults.bind(null, endpoint) | |
}); | |
return endpointOptions.request.hook(request, endpointOptions); | |
}; | |
return Object.assign(newApi, { | |
endpoint, | |
defaults: withDefaults.bind(null, endpoint) | |
}); | |
} | |
const request = withDefaults(endpoint.endpoint, { | |
headers: { | |
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` | |
} | |
}); | |
exports.request = request; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 8034: | |
/***/ ((module) => { | |
"use strict"; | |
/*! | |
* is-plain-object <https://github.com/jonschlinkert/is-plain-object> | |
* | |
* Copyright (c) 2014-2017, Jon Schlinkert. | |
* Released under the MIT License. | |
*/ | |
function isObject(o) { | |
return Object.prototype.toString.call(o) === '[object Object]'; | |
} | |
function isPlainObject(o) { | |
var ctor,prot; | |
if (isObject(o) === false) return false; | |
// If has modified constructor | |
ctor = o.constructor; | |
if (ctor === undefined) return true; | |
// If has modified prototype | |
prot = ctor.prototype; | |
if (isObject(prot) === false) return false; | |
// If constructor does not have an Object-specific method | |
if (prot.hasOwnProperty('isPrototypeOf') === false) { | |
return false; | |
} | |
// Most likely a plain Object | |
return true; | |
} | |
module.exports = isPlainObject; | |
/***/ }), | |
/***/ 4193: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
const VERSION = "2.13.3"; | |
/** | |
* Some “list” response that can be paginated have a different response structure | |
* | |
* They have a `total_count` key in the response (search also has `incomplete_results`, | |
* /installation/repositories also has `repository_selection`), as well as a key with | |
* the list of the items which name varies from endpoint to endpoint. | |
* | |
* Octokit normalizes these responses so that paginated results are always returned following | |
* the same structure. One challenge is that if the list response has only one page, no Link | |
* header is provided, so this header alone is not sufficient to check wether a response is | |
* paginated or not. | |
* | |
* We check if a "total_count" key is present in the response data, but also make sure that | |
* a "url" property is not, as the "Get the combined status for a specific ref" endpoint would | |
* otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref | |
*/ | |
function normalizePaginatedListResponse(response) { | |
const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); | |
if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way | |
// to retrieve the same information. | |
const incompleteResults = response.data.incomplete_results; | |
const repositorySelection = response.data.repository_selection; | |
const totalCount = response.data.total_count; | |
delete response.data.incomplete_results; | |
delete response.data.repository_selection; | |
delete response.data.total_count; | |
const namespaceKey = Object.keys(response.data)[0]; | |
const data = response.data[namespaceKey]; | |
response.data = data; | |
if (typeof incompleteResults !== "undefined") { | |
response.data.incomplete_results = incompleteResults; | |
} | |
if (typeof repositorySelection !== "undefined") { | |
response.data.repository_selection = repositorySelection; | |
} | |
response.data.total_count = totalCount; | |
return response; | |
} | |
function iterator(octokit, route, parameters) { | |
const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); | |
const requestMethod = typeof route === "function" ? route : octokit.request; | |
const method = options.method; | |
const headers = options.headers; | |
let url = options.url; | |
return { | |
[Symbol.asyncIterator]: () => ({ | |
async next() { | |
if (!url) return { | |
done: true | |
}; | |
const response = await requestMethod({ | |
method, | |
url, | |
headers | |
}); | |
const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: | |
// '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"' | |
// sets `url` to undefined if "next" URL is not present or `link` header is not set | |
url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; | |
return { | |
value: normalizedResponse | |
}; | |
} | |
}) | |
}; | |
} | |
function paginate(octokit, route, parameters, mapFn) { | |
if (typeof parameters === "function") { | |
mapFn = parameters; | |
parameters = undefined; | |
} | |
return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); | |
} | |
function gather(octokit, results, iterator, mapFn) { | |
return iterator.next().then(result => { | |
if (result.done) { | |
return results; | |
} | |
let earlyExit = false; | |
function done() { | |
earlyExit = true; | |
} | |
results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); | |
if (earlyExit) { | |
return results; | |
} | |
return gather(octokit, results, iterator, mapFn); | |
}); | |
} | |
const composePaginateRest = Object.assign(paginate, { | |
iterator | |
}); | |
const paginatingEndpoints = ["GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; | |
function isPaginatingEndpoint(arg) { | |
if (typeof arg === "string") { | |
return paginatingEndpoints.includes(arg); | |
} else { | |
return false; | |
} | |
} | |
/** | |
* @param octokit Octokit instance | |
* @param options Options passed to Octokit constructor | |
*/ | |
function paginateRest(octokit) { | |
return { | |
paginate: Object.assign(paginate.bind(null, octokit), { | |
iterator: iterator.bind(null, octokit) | |
}) | |
}; | |
} | |
paginateRest.VERSION = VERSION; | |
exports.composePaginateRest = composePaginateRest; | |
exports.isPaginatingEndpoint = isPaginatingEndpoint; | |
exports.paginateRest = paginateRest; | |
exports.paginatingEndpoints = paginatingEndpoints; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 3044: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function _defineProperty(obj, key, value) { | |
if (key in obj) { | |
Object.defineProperty(obj, key, { | |
value: value, | |
enumerable: true, | |
configurable: true, | |
writable: true | |
}); | |
} else { | |
obj[key] = value; | |
} | |
return obj; | |
} | |
function ownKeys(object, enumerableOnly) { | |
var keys = Object.keys(object); | |
if (Object.getOwnPropertySymbols) { | |
var symbols = Object.getOwnPropertySymbols(object); | |
if (enumerableOnly) symbols = symbols.filter(function (sym) { | |
return Object.getOwnPropertyDescriptor(object, sym).enumerable; | |
}); | |
keys.push.apply(keys, symbols); | |
} | |
return keys; | |
} | |
function _objectSpread2(target) { | |
for (var i = 1; i < arguments.length; i++) { | |
var source = arguments[i] != null ? arguments[i] : {}; | |
if (i % 2) { | |
ownKeys(Object(source), true).forEach(function (key) { | |
_defineProperty(target, key, source[key]); | |
}); | |
} else if (Object.getOwnPropertyDescriptors) { | |
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); | |
} else { | |
ownKeys(Object(source)).forEach(function (key) { | |
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); | |
}); | |
} | |
} | |
return target; | |
} | |
const Endpoints = { | |
actions: { | |
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], | |
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], | |
createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], | |
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], | |
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], | |
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], | |
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], | |
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], | |
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], | |
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], | |
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], | |
deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], | |
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], | |
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], | |
deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], | |
deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], | |
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], | |
deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], | |
disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], | |
disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], | |
downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], | |
downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], | |
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], | |
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], | |
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], | |
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], | |
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], | |
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], | |
getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], | |
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], | |
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], | |
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], | |
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], | |
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], | |
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], | |
getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], | |
getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { | |
renamed: ["actions", "getGithubActionsPermissionsRepository"] | |
}], | |
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], | |
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], | |
getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], | |
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], | |
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], | |
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], | |
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], | |
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], | |
getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], | |
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], | |
listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], | |
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], | |
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], | |
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], | |
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], | |
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], | |
listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], | |
listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], | |
listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], | |
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], | |
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], | |
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], | |
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], | |
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], | |
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], | |
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], | |
reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], | |
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], | |
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], | |
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], | |
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], | |
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], | |
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"] | |
}, | |
activity: { | |
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], | |
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], | |
deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], | |
getFeeds: ["GET /feeds"], | |
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], | |
getThread: ["GET /notifications/threads/{thread_id}"], | |
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], | |
listEventsForAuthenticatedUser: ["GET /users/{username}/events"], | |
listNotificationsForAuthenticatedUser: ["GET /notifications"], | |
listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], | |
listPublicEvents: ["GET /events"], | |
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], | |
listPublicEventsForUser: ["GET /users/{username}/events/public"], | |
listPublicOrgEvents: ["GET /orgs/{org}/events"], | |
listReceivedEventsForUser: ["GET /users/{username}/received_events"], | |
listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], | |
listRepoEvents: ["GET /repos/{owner}/{repo}/events"], | |
listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], | |
listReposStarredByAuthenticatedUser: ["GET /user/starred"], | |
listReposStarredByUser: ["GET /users/{username}/starred"], | |
listReposWatchedByUser: ["GET /users/{username}/subscriptions"], | |
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], | |
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], | |
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], | |
markNotificationsAsRead: ["PUT /notifications"], | |
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], | |
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], | |
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], | |
setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], | |
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], | |
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] | |
}, | |
apps: { | |
addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], | |
checkToken: ["POST /applications/{client_id}/token"], | |
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { | |
mediaType: { | |
previews: ["corsair"] | |
} | |
}], | |
createFromManifest: ["POST /app-manifests/{code}/conversions"], | |
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], | |
deleteAuthorization: ["DELETE /applications/{client_id}/grant"], | |
deleteInstallation: ["DELETE /app/installations/{installation_id}"], | |
deleteToken: ["DELETE /applications/{client_id}/token"], | |
getAuthenticated: ["GET /app"], | |
getBySlug: ["GET /apps/{app_slug}"], | |
getInstallation: ["GET /app/installations/{installation_id}"], | |
getOrgInstallation: ["GET /orgs/{org}/installation"], | |
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], | |
getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], | |
getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], | |
getUserInstallation: ["GET /users/{username}/installation"], | |
getWebhookConfigForApp: ["GET /app/hook/config"], | |
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], | |
listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], | |
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], | |
listInstallations: ["GET /app/installations"], | |
listInstallationsForAuthenticatedUser: ["GET /user/installations"], | |
listPlans: ["GET /marketplace_listing/plans"], | |
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], | |
listReposAccessibleToInstallation: ["GET /installation/repositories"], | |
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], | |
listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], | |
removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], | |
resetToken: ["PATCH /applications/{client_id}/token"], | |
revokeInstallationAccessToken: ["DELETE /installation/token"], | |
scopeToken: ["POST /applications/{client_id}/token/scoped"], | |
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], | |
unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], | |
updateWebhookConfigForApp: ["PATCH /app/hook/config"] | |
}, | |
billing: { | |
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], | |
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], | |
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], | |
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], | |
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], | |
getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] | |
}, | |
checks: { | |
create: ["POST /repos/{owner}/{repo}/check-runs"], | |
createSuite: ["POST /repos/{owner}/{repo}/check-suites"], | |
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], | |
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], | |
listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], | |
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], | |
listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], | |
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], | |
rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], | |
setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], | |
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] | |
}, | |
codeScanning: { | |
deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], | |
getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { | |
renamedParameters: { | |
alert_id: "alert_number" | |
} | |
}], | |
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], | |
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], | |
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], | |
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], | |
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], | |
updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], | |
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] | |
}, | |
codesOfConduct: { | |
getAllCodesOfConduct: ["GET /codes_of_conduct", { | |
mediaType: { | |
previews: ["scarlet-witch"] | |
} | |
}], | |
getConductCode: ["GET /codes_of_conduct/{key}", { | |
mediaType: { | |
previews: ["scarlet-witch"] | |
} | |
}], | |
getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { | |
mediaType: { | |
previews: ["scarlet-witch"] | |
} | |
}] | |
}, | |
emojis: { | |
get: ["GET /emojis"] | |
}, | |
enterpriseAdmin: { | |
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], | |
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], | |
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], | |
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], | |
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], | |
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], | |
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], | |
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] | |
}, | |
gists: { | |
checkIsStarred: ["GET /gists/{gist_id}/star"], | |
create: ["POST /gists"], | |
createComment: ["POST /gists/{gist_id}/comments"], | |
delete: ["DELETE /gists/{gist_id}"], | |
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], | |
fork: ["POST /gists/{gist_id}/forks"], | |
get: ["GET /gists/{gist_id}"], | |
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], | |
getRevision: ["GET /gists/{gist_id}/{sha}"], | |
list: ["GET /gists"], | |
listComments: ["GET /gists/{gist_id}/comments"], | |
listCommits: ["GET /gists/{gist_id}/commits"], | |
listForUser: ["GET /users/{username}/gists"], | |
listForks: ["GET /gists/{gist_id}/forks"], | |
listPublic: ["GET /gists/public"], | |
listStarred: ["GET /gists/starred"], | |
star: ["PUT /gists/{gist_id}/star"], | |
unstar: ["DELETE /gists/{gist_id}/star"], | |
update: ["PATCH /gists/{gist_id}"], | |
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] | |
}, | |
git: { | |
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], | |
createCommit: ["POST /repos/{owner}/{repo}/git/commits"], | |
createRef: ["POST /repos/{owner}/{repo}/git/refs"], | |
createTag: ["POST /repos/{owner}/{repo}/git/tags"], | |
createTree: ["POST /repos/{owner}/{repo}/git/trees"], | |
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], | |
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], | |
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], | |
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], | |
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], | |
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], | |
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], | |
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] | |
}, | |
gitignore: { | |
getAllTemplates: ["GET /gitignore/templates"], | |
getTemplate: ["GET /gitignore/templates/{name}"] | |
}, | |
interactions: { | |
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], | |
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], | |
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], | |
getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { | |
renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] | |
}], | |
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], | |
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], | |
removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], | |
removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { | |
renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] | |
}], | |
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], | |
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], | |
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], | |
setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { | |
renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] | |
}] | |
}, | |
issues: { | |
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], | |
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], | |
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], | |
create: ["POST /repos/{owner}/{repo}/issues"], | |
createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], | |
createLabel: ["POST /repos/{owner}/{repo}/labels"], | |
createMilestone: ["POST /repos/{owner}/{repo}/milestones"], | |
deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], | |
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], | |
deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], | |
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], | |
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], | |
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], | |
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], | |
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], | |
list: ["GET /issues"], | |
listAssignees: ["GET /repos/{owner}/{repo}/assignees"], | |
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], | |
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], | |
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], | |
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], | |
listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { | |
mediaType: { | |
previews: ["mockingbird"] | |
} | |
}], | |
listForAuthenticatedUser: ["GET /user/issues"], | |
listForOrg: ["GET /orgs/{org}/issues"], | |
listForRepo: ["GET /repos/{owner}/{repo}/issues"], | |
listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], | |
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], | |
listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], | |
listMilestones: ["GET /repos/{owner}/{repo}/milestones"], | |
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], | |
removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], | |
removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], | |
removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], | |
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], | |
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], | |
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], | |
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], | |
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], | |
updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] | |
}, | |
licenses: { | |
get: ["GET /licenses/{license}"], | |
getAllCommonlyUsed: ["GET /licenses"], | |
getForRepo: ["GET /repos/{owner}/{repo}/license"] | |
}, | |
markdown: { | |
render: ["POST /markdown"], | |
renderRaw: ["POST /markdown/raw", { | |
headers: { | |
"content-type": "text/plain; charset=utf-8" | |
} | |
}] | |
}, | |
meta: { | |
get: ["GET /meta"], | |
getOctocat: ["GET /octocat"], | |
getZen: ["GET /zen"], | |
root: ["GET /"] | |
}, | |
migrations: { | |
cancelImport: ["DELETE /repos/{owner}/{repo}/import"], | |
deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], | |
getImportStatus: ["GET /repos/{owner}/{repo}/import"], | |
getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], | |
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
listForAuthenticatedUser: ["GET /user/migrations", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
listForOrg: ["GET /orgs/{org}/migrations", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], | |
setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], | |
startForAuthenticatedUser: ["POST /user/migrations"], | |
startForOrg: ["POST /orgs/{org}/migrations"], | |
startImport: ["PUT /repos/{owner}/{repo}/import"], | |
unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { | |
mediaType: { | |
previews: ["wyandotte"] | |
} | |
}], | |
updateImport: ["PATCH /repos/{owner}/{repo}/import"] | |
}, | |
orgs: { | |
blockUser: ["PUT /orgs/{org}/blocks/{username}"], | |
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], | |
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], | |
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], | |
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], | |
convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], | |
createInvitation: ["POST /orgs/{org}/invitations"], | |
createWebhook: ["POST /orgs/{org}/hooks"], | |
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], | |
get: ["GET /orgs/{org}"], | |
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], | |
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], | |
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], | |
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], | |
list: ["GET /organizations"], | |
listAppInstallations: ["GET /orgs/{org}/installations"], | |
listBlockedUsers: ["GET /orgs/{org}/blocks"], | |
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], | |
listForAuthenticatedUser: ["GET /user/orgs"], | |
listForUser: ["GET /users/{username}/orgs"], | |
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], | |
listMembers: ["GET /orgs/{org}/members"], | |
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], | |
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], | |
listPendingInvitations: ["GET /orgs/{org}/invitations"], | |
listPublicMembers: ["GET /orgs/{org}/public_members"], | |
listWebhooks: ["GET /orgs/{org}/hooks"], | |
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], | |
removeMember: ["DELETE /orgs/{org}/members/{username}"], | |
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], | |
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], | |
removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], | |
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], | |
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], | |
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], | |
update: ["PATCH /orgs/{org}"], | |
updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], | |
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], | |
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] | |
}, | |
packages: { | |
deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], | |
deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], | |
deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], | |
deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], | |
getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { | |
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] | |
}], | |
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { | |
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] | |
}], | |
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], | |
getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], | |
getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], | |
getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], | |
getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], | |
getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], | |
getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], | |
getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], | |
getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], | |
restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], | |
restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], | |
restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], | |
restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] | |
}, | |
projects: { | |
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
createCard: ["POST /projects/columns/{column_id}/cards", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
createColumn: ["POST /projects/{project_id}/columns", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
createForAuthenticatedUser: ["POST /user/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
createForOrg: ["POST /orgs/{org}/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
createForRepo: ["POST /repos/{owner}/{repo}/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
delete: ["DELETE /projects/{project_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
deleteCard: ["DELETE /projects/columns/cards/{card_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
deleteColumn: ["DELETE /projects/columns/{column_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
get: ["GET /projects/{project_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
getCard: ["GET /projects/columns/cards/{card_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
getColumn: ["GET /projects/columns/{column_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listCards: ["GET /projects/columns/{column_id}/cards", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listCollaborators: ["GET /projects/{project_id}/collaborators", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listColumns: ["GET /projects/{project_id}/columns", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listForOrg: ["GET /orgs/{org}/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listForRepo: ["GET /repos/{owner}/{repo}/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listForUser: ["GET /users/{username}/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
moveCard: ["POST /projects/columns/cards/{card_id}/moves", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
moveColumn: ["POST /projects/columns/{column_id}/moves", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
update: ["PATCH /projects/{project_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
updateCard: ["PATCH /projects/columns/cards/{card_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
updateColumn: ["PATCH /projects/columns/{column_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}] | |
}, | |
pulls: { | |
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], | |
create: ["POST /repos/{owner}/{repo}/pulls"], | |
createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], | |
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], | |
createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], | |
deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], | |
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], | |
dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], | |
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], | |
getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], | |
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], | |
list: ["GET /repos/{owner}/{repo}/pulls"], | |
listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], | |
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], | |
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], | |
listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], | |
listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], | |
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], | |
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], | |
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], | |
removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], | |
requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], | |
submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], | |
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], | |
updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { | |
mediaType: { | |
previews: ["lydian"] | |
} | |
}], | |
updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], | |
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] | |
}, | |
rateLimit: { | |
get: ["GET /rate_limit"] | |
}, | |
reactions: { | |
createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
deleteLegacy: ["DELETE /reactions/{reaction_id}", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}, { | |
deprecated: "octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy" | |
}], | |
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}], | |
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { | |
mediaType: { | |
previews: ["squirrel-girl"] | |
} | |
}] | |
}, | |
repos: { | |
acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], | |
addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { | |
mapToData: "apps" | |
}], | |
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], | |
addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { | |
mapToData: "contexts" | |
}], | |
addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { | |
mapToData: "teams" | |
}], | |
addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { | |
mapToData: "users" | |
}], | |
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], | |
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { | |
mediaType: { | |
previews: ["dorian"] | |
} | |
}], | |
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], | |
createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], | |
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { | |
mediaType: { | |
previews: ["zzzax"] | |
} | |
}], | |
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], | |
createDeployKey: ["POST /repos/{owner}/{repo}/keys"], | |
createDeployment: ["POST /repos/{owner}/{repo}/deployments"], | |
createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], | |
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], | |
createForAuthenticatedUser: ["POST /user/repos"], | |
createFork: ["POST /repos/{owner}/{repo}/forks"], | |
createInOrg: ["POST /orgs/{org}/repos"], | |
createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], | |
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], | |
createPagesSite: ["POST /repos/{owner}/{repo}/pages", { | |
mediaType: { | |
previews: ["switcheroo"] | |
} | |
}], | |
createRelease: ["POST /repos/{owner}/{repo}/releases"], | |
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { | |
mediaType: { | |
previews: ["baptiste"] | |
} | |
}], | |
createWebhook: ["POST /repos/{owner}/{repo}/hooks"], | |
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], | |
delete: ["DELETE /repos/{owner}/{repo}"], | |
deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], | |
deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], | |
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], | |
deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], | |
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], | |
deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { | |
mediaType: { | |
previews: ["zzzax"] | |
} | |
}], | |
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], | |
deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], | |
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], | |
deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], | |
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { | |
mediaType: { | |
previews: ["switcheroo"] | |
} | |
}], | |
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], | |
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], | |
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], | |
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], | |
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { | |
mediaType: { | |
previews: ["london"] | |
} | |
}], | |
disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { | |
mediaType: { | |
previews: ["dorian"] | |
} | |
}], | |
downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { | |
renamed: ["repos", "downloadZipballArchive"] | |
}], | |
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], | |
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], | |
enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { | |
mediaType: { | |
previews: ["london"] | |
} | |
}], | |
enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { | |
mediaType: { | |
previews: ["dorian"] | |
} | |
}], | |
get: ["GET /repos/{owner}/{repo}"], | |
getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], | |
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], | |
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], | |
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], | |
getAllTopics: ["GET /repos/{owner}/{repo}/topics", { | |
mediaType: { | |
previews: ["mercy"] | |
} | |
}], | |
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], | |
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], | |
getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], | |
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], | |
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], | |
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], | |
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], | |
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], | |
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], | |
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], | |
getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { | |
mediaType: { | |
previews: ["zzzax"] | |
} | |
}], | |
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], | |
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], | |
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], | |
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], | |
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], | |
getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], | |
getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], | |
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], | |
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], | |
getPages: ["GET /repos/{owner}/{repo}/pages"], | |
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], | |
getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], | |
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], | |
getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], | |
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], | |
getReadme: ["GET /repos/{owner}/{repo}/readme"], | |
getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], | |
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], | |
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], | |
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], | |
getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], | |
getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], | |
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], | |
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], | |
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], | |
getViews: ["GET /repos/{owner}/{repo}/traffic/views"], | |
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], | |
getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], | |
listBranches: ["GET /repos/{owner}/{repo}/branches"], | |
listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { | |
mediaType: { | |
previews: ["groot"] | |
} | |
}], | |
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], | |
listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], | |
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], | |
listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], | |
listCommits: ["GET /repos/{owner}/{repo}/commits"], | |
listContributors: ["GET /repos/{owner}/{repo}/contributors"], | |
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], | |
listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], | |
listDeployments: ["GET /repos/{owner}/{repo}/deployments"], | |
listForAuthenticatedUser: ["GET /user/repos"], | |
listForOrg: ["GET /orgs/{org}/repos"], | |
listForUser: ["GET /users/{username}/repos"], | |
listForks: ["GET /repos/{owner}/{repo}/forks"], | |
listInvitations: ["GET /repos/{owner}/{repo}/invitations"], | |
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], | |
listLanguages: ["GET /repos/{owner}/{repo}/languages"], | |
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], | |
listPublic: ["GET /repositories"], | |
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { | |
mediaType: { | |
previews: ["groot"] | |
} | |
}], | |
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], | |
listReleases: ["GET /repos/{owner}/{repo}/releases"], | |
listTags: ["GET /repos/{owner}/{repo}/tags"], | |
listTeams: ["GET /repos/{owner}/{repo}/teams"], | |
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], | |
merge: ["POST /repos/{owner}/{repo}/merges"], | |
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], | |
removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { | |
mapToData: "apps" | |
}], | |
removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], | |
removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { | |
mapToData: "contexts" | |
}], | |
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], | |
removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { | |
mapToData: "teams" | |
}], | |
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { | |
mapToData: "users" | |
}], | |
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], | |
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { | |
mediaType: { | |
previews: ["mercy"] | |
} | |
}], | |
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], | |
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], | |
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { | |
mapToData: "apps" | |
}], | |
setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { | |
mapToData: "contexts" | |
}], | |
setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { | |
mapToData: "teams" | |
}], | |
setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { | |
mapToData: "users" | |
}], | |
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], | |
transfer: ["POST /repos/{owner}/{repo}/transfer"], | |
update: ["PATCH /repos/{owner}/{repo}"], | |
updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], | |
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], | |
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], | |
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], | |
updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], | |
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], | |
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], | |
updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { | |
renamed: ["repos", "updateStatusCheckProtection"] | |
}], | |
updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], | |
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], | |
updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], | |
uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { | |
baseUrl: "https://uploads.github.com" | |
}] | |
}, | |
search: { | |
code: ["GET /search/code"], | |
commits: ["GET /search/commits", { | |
mediaType: { | |
previews: ["cloak"] | |
} | |
}], | |
issuesAndPullRequests: ["GET /search/issues"], | |
labels: ["GET /search/labels"], | |
repos: ["GET /search/repositories"], | |
topics: ["GET /search/topics", { | |
mediaType: { | |
previews: ["mercy"] | |
} | |
}], | |
users: ["GET /search/users"] | |
}, | |
secretScanning: { | |
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], | |
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], | |
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] | |
}, | |
teams: { | |
addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], | |
addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], | |
checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], | |
create: ["POST /orgs/{org}/teams"], | |
createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], | |
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], | |
deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], | |
deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], | |
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], | |
getByName: ["GET /orgs/{org}/teams/{team_slug}"], | |
getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], | |
getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], | |
getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], | |
list: ["GET /orgs/{org}/teams"], | |
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], | |
listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], | |
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], | |
listForAuthenticatedUser: ["GET /user/teams"], | |
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], | |
listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], | |
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { | |
mediaType: { | |
previews: ["inertia"] | |
} | |
}], | |
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], | |
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], | |
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], | |
removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], | |
updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], | |
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], | |
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] | |
}, | |
users: { | |
addEmailForAuthenticated: ["POST /user/emails"], | |
block: ["PUT /user/blocks/{username}"], | |
checkBlocked: ["GET /user/blocks/{username}"], | |
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], | |
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], | |
createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], | |
createPublicSshKeyForAuthenticated: ["POST /user/keys"], | |
deleteEmailForAuthenticated: ["DELETE /user/emails"], | |
deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], | |
deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], | |
follow: ["PUT /user/following/{username}"], | |
getAuthenticated: ["GET /user"], | |
getByUsername: ["GET /users/{username}"], | |
getContextForUser: ["GET /users/{username}/hovercard"], | |
getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], | |
getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], | |
list: ["GET /users"], | |
listBlockedByAuthenticated: ["GET /user/blocks"], | |
listEmailsForAuthenticated: ["GET /user/emails"], | |
listFollowedByAuthenticated: ["GET /user/following"], | |
listFollowersForAuthenticatedUser: ["GET /user/followers"], | |
listFollowersForUser: ["GET /users/{username}/followers"], | |
listFollowingForUser: ["GET /users/{username}/following"], | |
listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], | |
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], | |
listPublicEmailsForAuthenticated: ["GET /user/public_emails"], | |
listPublicKeysForUser: ["GET /users/{username}/keys"], | |
listPublicSshKeysForAuthenticated: ["GET /user/keys"], | |
setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], | |
unblock: ["DELETE /user/blocks/{username}"], | |
unfollow: ["DELETE /user/following/{username}"], | |
updateAuthenticated: ["PATCH /user"] | |
} | |
}; | |
const VERSION = "5.1.1"; | |
function endpointsToMethods(octokit, endpointsMap) { | |
const newMethods = {}; | |
for (const [scope, endpoints] of Object.entries(endpointsMap)) { | |
for (const [methodName, endpoint] of Object.entries(endpoints)) { | |
const [route, defaults, decorations] = endpoint; | |
const [method, url] = route.split(/ /); | |
const endpointDefaults = Object.assign({ | |
method, | |
url | |
}, defaults); | |
if (!newMethods[scope]) { | |
newMethods[scope] = {}; | |
} | |
const scopeMethods = newMethods[scope]; | |
if (decorations) { | |
scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); | |
continue; | |
} | |
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); | |
} | |
} | |
return newMethods; | |
} | |
function decorate(octokit, scope, methodName, defaults, decorations) { | |
const requestWithDefaults = octokit.request.defaults(defaults); | |
/* istanbul ignore next */ | |
function withDecorations(...args) { | |
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 | |
let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` | |
if (decorations.mapToData) { | |
options = Object.assign({}, options, { | |
data: options[decorations.mapToData], | |
[decorations.mapToData]: undefined | |
}); | |
return requestWithDefaults(options); | |
} | |
if (decorations.renamed) { | |
const [newScope, newMethodName] = decorations.renamed; | |
octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); | |
} | |
if (decorations.deprecated) { | |
octokit.log.warn(decorations.deprecated); | |
} | |
if (decorations.renamedParameters) { | |
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 | |
const options = requestWithDefaults.endpoint.merge(...args); | |
for (const [name, alias] of Object.entries(decorations.renamedParameters)) { | |
if (name in options) { | |
octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); | |
if (!(alias in options)) { | |
options[alias] = options[name]; | |
} | |
delete options[name]; | |
} | |
} | |
return requestWithDefaults(options); | |
} // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 | |
return requestWithDefaults(...args); | |
} | |
return Object.assign(withDecorations, requestWithDefaults); | |
} | |
function restEndpointMethods(octokit) { | |
const api = endpointsToMethods(octokit, Endpoints); | |
return { | |
rest: api | |
}; | |
} | |
restEndpointMethods.VERSION = VERSION; | |
function legacyRestEndpointMethods(octokit) { | |
const api = endpointsToMethods(octokit, Endpoints); | |
return _objectSpread2(_objectSpread2({}, api), {}, { | |
rest: api | |
}); | |
} | |
legacyRestEndpointMethods.VERSION = VERSION; | |
exports.legacyRestEndpointMethods = legacyRestEndpointMethods; | |
exports.restEndpointMethods = restEndpointMethods; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 537: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } | |
var deprecation = __nccwpck_require__(8932); | |
var once = _interopDefault(__nccwpck_require__(1223)); | |
const logOnce = once(deprecation => console.warn(deprecation)); | |
/** | |
* Error with extra properties to help with debugging | |
*/ | |
class RequestError extends Error { | |
constructor(message, statusCode, options) { | |
super(message); // Maintains proper stack trace (only available on V8) | |
/* istanbul ignore next */ | |
if (Error.captureStackTrace) { | |
Error.captureStackTrace(this, this.constructor); | |
} | |
this.name = "HttpError"; | |
this.status = statusCode; | |
Object.defineProperty(this, "code", { | |
get() { | |
logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); | |
return statusCode; | |
} | |
}); | |
this.headers = options.headers || {}; // redact request credentials without mutating original request options | |
const requestCopy = Object.assign({}, options.request); | |
if (options.request.headers.authorization) { | |
requestCopy.headers = Object.assign({}, options.request.headers, { | |
authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") | |
}); | |
} | |
requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit | |
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications | |
.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended | |
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header | |
.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); | |
this.request = requestCopy; | |
} | |
} | |
exports.RequestError = RequestError; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 6234: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } | |
var endpoint = __nccwpck_require__(9440); | |
var universalUserAgent = __nccwpck_require__(5030); | |
var isPlainObject = __nccwpck_require__(9062); | |
var nodeFetch = _interopDefault(__nccwpck_require__(467)); | |
var requestError = __nccwpck_require__(537); | |
const VERSION = "5.4.15"; | |
function getBufferResponse(response) { | |
return response.arrayBuffer(); | |
} | |
function fetchWrapper(requestOptions) { | |
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { | |
requestOptions.body = JSON.stringify(requestOptions.body); | |
} | |
let headers = {}; | |
let status; | |
let url; | |
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; | |
return fetch(requestOptions.url, Object.assign({ | |
method: requestOptions.method, | |
body: requestOptions.body, | |
headers: requestOptions.headers, | |
redirect: requestOptions.redirect | |
}, // `requestOptions.request.agent` type is incompatible | |
// see https://github.com/octokit/types.ts/pull/264 | |
requestOptions.request)).then(response => { | |
url = response.url; | |
status = response.status; | |
for (const keyAndValue of response.headers) { | |
headers[keyAndValue[0]] = keyAndValue[1]; | |
} | |
if (status === 204 || status === 205) { | |
return; | |
} // GitHub API returns 200 for HEAD requests | |
if (requestOptions.method === "HEAD") { | |
if (status < 400) { | |
return; | |
} | |
throw new requestError.RequestError(response.statusText, status, { | |
headers, | |
request: requestOptions | |
}); | |
} | |
if (status === 304) { | |
throw new requestError.RequestError("Not modified", status, { | |
headers, | |
request: requestOptions | |
}); | |
} | |
if (status >= 400) { | |
return response.text().then(message => { | |
const error = new requestError.RequestError(message, status, { | |
headers, | |
request: requestOptions | |
}); | |
try { | |
let responseBody = JSON.parse(error.message); | |
Object.assign(error, responseBody); | |
let errors = responseBody.errors; // Assumption `errors` would always be in Array format | |
error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); | |
} catch (e) {// ignore, see octokit/rest.js#684 | |
} | |
throw error; | |
}); | |
} | |
const contentType = response.headers.get("content-type"); | |
if (/application\/json/.test(contentType)) { | |
return response.json(); | |
} | |
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { | |
return response.text(); | |
} | |
return getBufferResponse(response); | |
}).then(data => { | |
return { | |
status, | |
url, | |
headers, | |
data | |
}; | |
}).catch(error => { | |
if (error instanceof requestError.RequestError) { | |
throw error; | |
} | |
throw new requestError.RequestError(error.message, 500, { | |
headers, | |
request: requestOptions | |
}); | |
}); | |
} | |
function withDefaults(oldEndpoint, newDefaults) { | |
const endpoint = oldEndpoint.defaults(newDefaults); | |
const newApi = function (route, parameters) { | |
const endpointOptions = endpoint.merge(route, parameters); | |
if (!endpointOptions.request || !endpointOptions.request.hook) { | |
return fetchWrapper(endpoint.parse(endpointOptions)); | |
} | |
const request = (route, parameters) => { | |
return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); | |
}; | |
Object.assign(request, { | |
endpoint, | |
defaults: withDefaults.bind(null, endpoint) | |
}); | |
return endpointOptions.request.hook(request, endpointOptions); | |
}; | |
return Object.assign(newApi, { | |
endpoint, | |
defaults: withDefaults.bind(null, endpoint) | |
}); | |
} | |
const request = withDefaults(endpoint.endpoint, { | |
headers: { | |
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` | |
} | |
}); | |
exports.request = request; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 9062: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
/*! | |
* is-plain-object <https://github.com/jonschlinkert/is-plain-object> | |
* | |
* Copyright (c) 2014-2017, Jon Schlinkert. | |
* Released under the MIT License. | |
*/ | |
function isObject(o) { | |
return Object.prototype.toString.call(o) === '[object Object]'; | |
} | |
function isPlainObject(o) { | |
var ctor,prot; | |
if (isObject(o) === false) return false; | |
// If has modified constructor | |
ctor = o.constructor; | |
if (ctor === undefined) return true; | |
// If has modified prototype | |
prot = ctor.prototype; | |
if (isObject(prot) === false) return false; | |
// If constructor does not have an Object-specific method | |
if (prot.hasOwnProperty('isPrototypeOf') === false) { | |
return false; | |
} | |
// Most likely a plain Object | |
return true; | |
} | |
exports.isPlainObject = isPlainObject; | |
/***/ }), | |
/***/ 9417: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = balanced; | |
function balanced(a, b, str) { | |
if (a instanceof RegExp) a = maybeMatch(a, str); | |
if (b instanceof RegExp) b = maybeMatch(b, str); | |
var r = range(a, b, str); | |
return r && { | |
start: r[0], | |
end: r[1], | |
pre: str.slice(0, r[0]), | |
body: str.slice(r[0] + a.length, r[1]), | |
post: str.slice(r[1] + b.length) | |
}; | |
} | |
function maybeMatch(reg, str) { | |
var m = str.match(reg); | |
return m ? m[0] : null; | |
} | |
balanced.range = range; | |
function range(a, b, str) { | |
var begs, beg, left, right, result; | |
var ai = str.indexOf(a); | |
var bi = str.indexOf(b, ai + 1); | |
var i = ai; | |
if (ai >= 0 && bi > 0) { | |
if(a===b) { | |
return [ai, bi]; | |
} | |
begs = []; | |
left = str.length; | |
while (i >= 0 && !result) { | |
if (i == ai) { | |
begs.push(i); | |
ai = str.indexOf(a, i + 1); | |
} else if (begs.length == 1) { | |
result = [ begs.pop(), bi ]; | |
} else { | |
beg = begs.pop(); | |
if (beg < left) { | |
left = beg; | |
right = bi; | |
} | |
bi = str.indexOf(b, i + 1); | |
} | |
i = ai < bi && ai >= 0 ? ai : bi; | |
} | |
if (begs.length) { | |
result = [ left, right ]; | |
} | |
} | |
return result; | |
} | |
/***/ }), | |
/***/ 3682: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
var register = __nccwpck_require__(4670) | |
var addHook = __nccwpck_require__(5549) | |
var removeHook = __nccwpck_require__(6819) | |
// bind with array of arguments: https://stackoverflow.com/a/21792913 | |
var bind = Function.bind | |
var bindable = bind.bind(bind) | |
function bindApi (hook, state, name) { | |
var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) | |
hook.api = { remove: removeHookRef } | |
hook.remove = removeHookRef | |
;['before', 'error', 'after', 'wrap'].forEach(function (kind) { | |
var args = name ? [state, kind, name] : [state, kind] | |
hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) | |
}) | |
} | |
function HookSingular () { | |
var singularHookName = 'h' | |
var singularHookState = { | |
registry: {} | |
} | |
var singularHook = register.bind(null, singularHookState, singularHookName) | |
bindApi(singularHook, singularHookState, singularHookName) | |
return singularHook | |
} | |
function HookCollection () { | |
var state = { | |
registry: {} | |
} | |
var hook = register.bind(null, state) | |
bindApi(hook, state) | |
return hook | |
} | |
var collectionHookDeprecationMessageDisplayed = false | |
function Hook () { | |
if (!collectionHookDeprecationMessageDisplayed) { | |
console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') | |
collectionHookDeprecationMessageDisplayed = true | |
} | |
return HookCollection() | |
} | |
Hook.Singular = HookSingular.bind() | |
Hook.Collection = HookCollection.bind() | |
module.exports = Hook | |
// expose constructors as a named property for TypeScript | |
module.exports.Hook = Hook | |
module.exports.Singular = Hook.Singular | |
module.exports.Collection = Hook.Collection | |
/***/ }), | |
/***/ 5549: | |
/***/ ((module) => { | |
module.exports = addHook; | |
function addHook(state, kind, name, hook) { | |
var orig = hook; | |
if (!state.registry[name]) { | |
state.registry[name] = []; | |
} | |
if (kind === "before") { | |
hook = function (method, options) { | |
return Promise.resolve() | |
.then(orig.bind(null, options)) | |
.then(method.bind(null, options)); | |
}; | |
} | |
if (kind === "after") { | |
hook = function (method, options) { | |
var result; | |
return Promise.resolve() | |
.then(method.bind(null, options)) | |
.then(function (result_) { | |
result = result_; | |
return orig(result, options); | |
}) | |
.then(function () { | |
return result; | |
}); | |
}; | |
} | |
if (kind === "error") { | |
hook = function (method, options) { | |
return Promise.resolve() | |
.then(method.bind(null, options)) | |
.catch(function (error) { | |
return orig(error, options); | |
}); | |
}; | |
} | |
state.registry[name].push({ | |
hook: hook, | |
orig: orig, | |
}); | |
} | |
/***/ }), | |
/***/ 4670: | |
/***/ ((module) => { | |
module.exports = register; | |
function register(state, name, method, options) { | |
if (typeof method !== "function") { | |
throw new Error("method for before hook must be a function"); | |
} | |
if (!options) { | |
options = {}; | |
} | |
if (Array.isArray(name)) { | |
return name.reverse().reduce(function (callback, name) { | |
return register.bind(null, state, name, callback, options); | |
}, method)(); | |
} | |
return Promise.resolve().then(function () { | |
if (!state.registry[name]) { | |
return method(options); | |
} | |
return state.registry[name].reduce(function (method, registered) { | |
return registered.hook.bind(null, method, options); | |
}, method)(); | |
}); | |
} | |
/***/ }), | |
/***/ 6819: | |
/***/ ((module) => { | |
module.exports = removeHook; | |
function removeHook(state, name, method) { | |
if (!state.registry[name]) { | |
return; | |
} | |
var index = state.registry[name] | |
.map(function (registered) { | |
return registered.orig; | |
}) | |
.indexOf(method); | |
if (index === -1) { | |
return; | |
} | |
state.registry[name].splice(index, 1); | |
} | |
/***/ }), | |
/***/ 3717: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
var concatMap = __nccwpck_require__(6891); | |
var balanced = __nccwpck_require__(9417); | |
module.exports = expandTop; | |
var escSlash = '\0SLASH'+Math.random()+'\0'; | |
var escOpen = '\0OPEN'+Math.random()+'\0'; | |
var escClose = '\0CLOSE'+Math.random()+'\0'; | |
var escComma = '\0COMMA'+Math.random()+'\0'; | |
var escPeriod = '\0PERIOD'+Math.random()+'\0'; | |
function numeric(str) { | |
return parseInt(str, 10) == str | |
? parseInt(str, 10) | |
: str.charCodeAt(0); | |
} | |
function escapeBraces(str) { | |
return str.split('\\\\').join(escSlash) | |
.split('\\{').join(escOpen) | |
.split('\\}').join(escClose) | |
.split('\\,').join(escComma) | |
.split('\\.').join(escPeriod); | |
} | |
function unescapeBraces(str) { | |
return str.split(escSlash).join('\\') | |
.split(escOpen).join('{') | |
.split(escClose).join('}') | |
.split(escComma).join(',') | |
.split(escPeriod).join('.'); | |
} | |
// Basically just str.split(","), but handling cases | |
// where we have nested braced sections, which should be | |
// treated as individual members, like {a,{b,c},d} | |
function parseCommaParts(str) { | |
if (!str) | |
return ['']; | |
var parts = []; | |
var m = balanced('{', '}', str); | |
if (!m) | |
return str.split(','); | |
var pre = m.pre; | |
var body = m.body; | |
var post = m.post; | |
var p = pre.split(','); | |
p[p.length-1] += '{' + body + '}'; | |
var postParts = parseCommaParts(post); | |
if (post.length) { | |
p[p.length-1] += postParts.shift(); | |
p.push.apply(p, postParts); | |
} | |
parts.push.apply(parts, p); | |
return parts; | |
} | |
function expandTop(str) { | |
if (!str) | |
return []; | |
// I don't know why Bash 4.3 does this, but it does. | |
// Anything starting with {} will have the first two bytes preserved | |
// but *only* at the top level, so {},a}b will not expand to anything, | |
// but a{},b}c will be expanded to [a}c,abc]. | |
// One could argue that this is a bug in Bash, but since the goal of | |
// this module is to match Bash's rules, we escape a leading {} | |
if (str.substr(0, 2) === '{}') { | |
str = '\\{\\}' + str.substr(2); | |
} | |
return expand(escapeBraces(str), true).map(unescapeBraces); | |
} | |
function identity(e) { | |
return e; | |
} | |
function embrace(str) { | |
return '{' + str + '}'; | |
} | |
function isPadded(el) { | |
return /^-?0\d/.test(el); | |
} | |
function lte(i, y) { | |
return i <= y; | |
} | |
function gte(i, y) { | |
return i >= y; | |
} | |
function expand(str, isTop) { | |
var expansions = []; | |
var m = balanced('{', '}', str); | |
if (!m || /\$$/.test(m.pre)) return [str]; | |
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); | |
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); | |
var isSequence = isNumericSequence || isAlphaSequence; | |
var isOptions = m.body.indexOf(',') >= 0; | |
if (!isSequence && !isOptions) { | |
// {a},b} | |
if (m.post.match(/,.*\}/)) { | |
str = m.pre + '{' + m.body + escClose + m.post; | |
return expand(str); | |
} | |
return [str]; | |
} | |
var n; | |
if (isSequence) { | |
n = m.body.split(/\.\./); | |
} else { | |
n = parseCommaParts(m.body); | |
if (n.length === 1) { | |
// x{{a,b}}y ==> x{a}y x{b}y | |
n = expand(n[0], false).map(embrace); | |
if (n.length === 1) { | |
var post = m.post.length | |
? expand(m.post, false) | |
: ['']; | |
return post.map(function(p) { | |
return m.pre + n[0] + p; | |
}); | |
} | |
} | |
} | |
// at this point, n is the parts, and we know it's not a comma set | |
// with a single entry. | |
// no need to expand pre, since it is guaranteed to be free of brace-sets | |
var pre = m.pre; | |
var post = m.post.length | |
? expand(m.post, false) | |
: ['']; | |
var N; | |
if (isSequence) { | |
var x = numeric(n[0]); | |
var y = numeric(n[1]); | |
var width = Math.max(n[0].length, n[1].length) | |
var incr = n.length == 3 | |
? Math.abs(numeric(n[2])) | |
: 1; | |
var test = lte; | |
var reverse = y < x; | |
if (reverse) { | |
incr *= -1; | |
test = gte; | |
} | |
var pad = n.some(isPadded); | |
N = []; | |
for (var i = x; test(i, y); i += incr) { | |
var c; | |
if (isAlphaSequence) { | |
c = String.fromCharCode(i); | |
if (c === '\\') | |
c = ''; | |
} else { | |
c = String(i); | |
if (pad) { | |
var need = width - c.length; | |
if (need > 0) { | |
var z = new Array(need + 1).join('0'); | |
if (i < 0) | |
c = '-' + z + c.slice(1); | |
else | |
c = z + c; | |
} | |
} | |
} | |
N.push(c); | |
} | |
} else { | |
N = concatMap(n, function(el) { return expand(el, false) }); | |
} | |
for (var j = 0; j < N.length; j++) { | |
for (var k = 0; k < post.length; k++) { | |
var expansion = pre + N[j] + post[k]; | |
if (!isTop || isSequence || expansion) | |
expansions.push(expansion); | |
} | |
} | |
return expansions; | |
} | |
/***/ }), | |
/***/ 6891: | |
/***/ ((module) => { | |
module.exports = function (xs, fn) { | |
var res = []; | |
for (var i = 0; i < xs.length; i++) { | |
var x = fn(xs[i], i); | |
if (isArray(x)) res.push.apply(res, x); | |
else res.push(x); | |
} | |
return res; | |
}; | |
var isArray = Array.isArray || function (xs) { | |
return Object.prototype.toString.call(xs) === '[object Array]'; | |
}; | |
/***/ }), | |
/***/ 6942: | |
/***/ ((module) => { | |
class ResizeableBuffer{ | |
constructor(size=100){ | |
this.size = size | |
this.length = 0 | |
this.buf = Buffer.alloc(size) | |
} | |
prepend(val){ | |
if(Buffer.isBuffer(val)){ | |
const length = this.length + val.length | |
if(length >= this.size){ | |
this.resize() | |
if(length >= this.size){ | |
throw Error('INVALID_BUFFER_STATE') | |
} | |
} | |
const buf = this.buf | |
this.buf = Buffer.alloc(this.size) | |
val.copy(this.buf, 0) | |
buf.copy(this.buf, val.length) | |
this.length += val.length | |
}else{ | |
const length = this.length++ | |
if(length === this.size){ | |
this.resize() | |
} | |
const buf = this.clone() | |
this.buf[0] = val | |
buf.copy(this.buf,1, 0, length) | |
} | |
} | |
append(val){ | |
const length = this.length++ | |
if(length === this.size){ | |
this.resize() | |
} | |
this.buf[length] = val | |
} | |
clone(){ | |
return Buffer.from(this.buf.slice(0, this.length)) | |
} | |
resize(){ | |
const length = this.length | |
this.size = this.size * 2 | |
const buf = Buffer.alloc(this.size) | |
this.buf.copy(buf,0, 0, length) | |
this.buf = buf | |
} | |
toString(encoding){ | |
if(encoding){ | |
return this.buf.slice(0, this.length).toString(encoding) | |
}else{ | |
return Uint8Array.prototype.slice.call(this.buf.slice(0, this.length)) | |
} | |
} | |
toJSON(){ | |
return this.toString('utf8') | |
} | |
reset(){ | |
this.length = 0 | |
} | |
} | |
module.exports = ResizeableBuffer | |
/***/ }), | |
/***/ 2830: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
/* | |
CSV Parse | |
Please look at the [project documentation](https://csv.js.org/parse/) for | |
additional information. | |
*/ | |
const { Transform } = __nccwpck_require__(2413) | |
const ResizeableBuffer = __nccwpck_require__(6942) | |
// white space characters | |
// https://en.wikipedia.org/wiki/Whitespace_character | |
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types | |
// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff | |
const tab = 9 | |
const nl = 10 // \n, 0x0A in hexadecimal, 10 in decimal | |
const np = 12 | |
const cr = 13 // \r, 0x0D in hexadécimal, 13 in decimal | |
const space = 32 | |
const boms = { | |
// Note, the following are equals: | |
// Buffer.from("\ufeff") | |
// Buffer.from([239, 187, 191]) | |
// Buffer.from('EFBBBF', 'hex') | |
'utf8': Buffer.from([239, 187, 191]), | |
// Note, the following are equals: | |
// Buffer.from "\ufeff", 'utf16le | |
// Buffer.from([255, 254]) | |
'utf16le': Buffer.from([255, 254]) | |
} | |
class Parser extends Transform { | |
constructor(opts = {}){ | |
super({...{readableObjectMode: true}, ...opts, encoding: null}) | |
this.__originalOptions = opts | |
this.__normalizeOptions(opts) | |
} | |
__normalizeOptions(opts){ | |
const options = {} | |
// Merge with user options | |
for(let opt in opts){ | |
options[underscore(opt)] = opts[opt] | |
} | |
// Normalize option `encoding` | |
// Note: defined first because other options depends on it | |
// to convert chars/strings into buffers. | |
if(options.encoding === undefined || options.encoding === true){ | |
options.encoding = 'utf8' | |
}else if(options.encoding === null || options.encoding === false){ | |
options.encoding = null | |
}else if(typeof options.encoding !== 'string' && options.encoding !== null){ | |
throw new CsvError('CSV_INVALID_OPTION_ENCODING', [ | |
'Invalid option encoding:', | |
'encoding must be a string or null to return a buffer,', | |
`got ${JSON.stringify(options.encoding)}` | |
], options) | |
} | |
// Normalize option `bom` | |
if(options.bom === undefined || options.bom === null || options.bom === false){ | |
options.bom = false | |
}else if(options.bom !== true){ | |
throw new CsvError('CSV_INVALID_OPTION_BOM', [ | |
'Invalid option bom:', 'bom must be true,', | |
`got ${JSON.stringify(options.bom)}` | |
], options) | |
} | |
// Normalize option `cast` | |
let fnCastField = null | |
if(options.cast === undefined || options.cast === null || options.cast === false || options.cast === ''){ | |
options.cast = undefined | |
}else if(typeof options.cast === 'function'){ | |
fnCastField = options.cast | |
options.cast = true | |
}else if(options.cast !== true){ | |
throw new CsvError('CSV_INVALID_OPTION_CAST', [ | |
'Invalid option cast:', 'cast must be true or a function,', | |
`got ${JSON.stringify(options.cast)}` | |
], options) | |
} | |
// Normalize option `cast_date` | |
if(options.cast_date === undefined || options.cast_date === null || options.cast_date === false || options.cast_date === ''){ | |
options.cast_date = false | |
}else if(options.cast_date === true){ | |
options.cast_date = function(value){ | |
const date = Date.parse(value) | |
return !isNaN(date) ? new Date(date) : value | |
} | |
}else if(typeof options.cast_date !== 'function'){ | |
throw new CsvError('CSV_INVALID_OPTION_CAST_DATE', [ | |
'Invalid option cast_date:', 'cast_date must be true or a function,', | |
`got ${JSON.stringify(options.cast_date)}` | |
], options) | |
} | |
// Normalize option `columns` | |
let fnFirstLineToHeaders = null | |
if(options.columns === true){ | |
// Fields in the first line are converted as-is to columns | |
fnFirstLineToHeaders = undefined | |
}else if(typeof options.columns === 'function'){ | |
fnFirstLineToHeaders = options.columns | |
options.columns = true | |
}else if(Array.isArray(options.columns)){ | |
options.columns = normalizeColumnsArray(options.columns) | |
}else if(options.columns === undefined || options.columns === null || options.columns === false){ | |
options.columns = false | |
}else{ | |
throw new CsvError('CSV_INVALID_OPTION_COLUMNS', [ | |
'Invalid option columns:', | |
'expect an array, a function or true,', | |
`got ${JSON.stringify(options.columns)}` | |
], options) | |
} | |
// Normalize option `columns_duplicates_to_array` | |
if(options.columns_duplicates_to_array === undefined || options.columns_duplicates_to_array === null || options.columns_duplicates_to_array === false){ | |
options.columns_duplicates_to_array = false | |
}else if(options.columns_duplicates_to_array !== true){ | |
throw new CsvError('CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY', [ | |
'Invalid option columns_duplicates_to_array:', | |
'expect an boolean,', | |
`got ${JSON.stringify(options.columns_duplicates_to_array)}` | |
], options) | |
}else if(options.columns === false){ | |
throw new CsvError('CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY', [ | |
'Invalid option columns_duplicates_to_array:', | |
'the `columns` mode must be activated.' | |
], options) | |
} | |
// Normalize option `comment` | |
if(options.comment === undefined || options.comment === null || options.comment === false || options.comment === ''){ | |
options.comment = null | |
}else{ | |
if(typeof options.comment === 'string'){ | |
options.comment = Buffer.from(options.comment, options.encoding) | |
} | |
if(!Buffer.isBuffer(options.comment)){ | |
throw new CsvError('CSV_INVALID_OPTION_COMMENT', [ | |
'Invalid option comment:', | |
'comment must be a buffer or a string,', | |
`got ${JSON.stringify(options.comment)}` | |
], options) | |
} | |
} | |
// Normalize option `delimiter` | |
const delimiter_json = JSON.stringify(options.delimiter) | |
if(!Array.isArray(options.delimiter)) options.delimiter = [options.delimiter] | |
if(options.delimiter.length === 0){ | |
throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [ | |
'Invalid option delimiter:', | |
'delimiter must be a non empty string or buffer or array of string|buffer,', | |
`got ${delimiter_json}` | |
], options) | |
} | |
options.delimiter = options.delimiter.map(function(delimiter){ | |
if(delimiter === undefined || delimiter === null || delimiter === false){ | |
return Buffer.from(',', options.encoding) | |
} | |
if(typeof delimiter === 'string'){ | |
delimiter = Buffer.from(delimiter, options.encoding) | |
} | |
if( !Buffer.isBuffer(delimiter) || delimiter.length === 0){ | |
throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [ | |
'Invalid option delimiter:', | |
'delimiter must be a non empty string or buffer or array of string|buffer,', | |
`got ${delimiter_json}` | |
], options) | |
} | |
return delimiter | |
}) | |
// Normalize option `escape` | |
if(options.escape === undefined || options.escape === true){ | |
options.escape = Buffer.from('"', options.encoding) | |
}else if(typeof options.escape === 'string'){ | |
options.escape = Buffer.from(options.escape, options.encoding) | |
}else if (options.escape === null || options.escape === false){ | |
options.escape = null | |
} | |
if(options.escape !== null){ | |
if(!Buffer.isBuffer(options.escape)){ | |
throw new Error(`Invalid Option: escape must be a buffer, a string or a boolean, got ${JSON.stringify(options.escape)}`) | |
} | |
} | |
// Normalize option `from` | |
if(options.from === undefined || options.from === null){ | |
options.from = 1 | |
}else{ | |
if(typeof options.from === 'string' && /\d+/.test(options.from)){ | |
options.from = parseInt(options.from) | |
} | |
if(Number.isInteger(options.from)){ | |
if(options.from < 0){ | |
throw new Error(`Invalid Option: from must be a positive integer, got ${JSON.stringify(opts.from)}`) | |
} | |
}else{ | |
throw new Error(`Invalid Option: from must be an integer, got ${JSON.stringify(options.from)}`) | |
} | |
} | |
// Normalize option `from_line` | |
if(options.from_line === undefined || options.from_line === null){ | |
options.from_line = 1 | |
}else{ | |
if(typeof options.from_line === 'string' && /\d+/.test(options.from_line)){ | |
options.from_line = parseInt(options.from_line) | |
} | |
if(Number.isInteger(options.from_line)){ | |
if(options.from_line <= 0){ | |
throw new Error(`Invalid Option: from_line must be a positive integer greater than 0, got ${JSON.stringify(opts.from_line)}`) | |
} | |
}else{ | |
throw new Error(`Invalid Option: from_line must be an integer, got ${JSON.stringify(opts.from_line)}`) | |
} | |
} | |
// Normalize options `ignore_last_delimiters` | |
if(options.ignore_last_delimiters === undefined || options.ignore_last_delimiters === null){ | |
options.ignore_last_delimiters = false | |
}else if(typeof options.ignore_last_delimiters === 'number'){ | |
options.ignore_last_delimiters = Math.floor(options.ignore_last_delimiters) | |
if(options.ignore_last_delimiters === 0){ | |
options.ignore_last_delimiters = false | |
} | |
}else if(typeof options.ignore_last_delimiters !== 'boolean'){ | |
throw new CsvError('CSV_INVALID_OPTION_IGNORE_LAST_DELIMITERS', [ | |
'Invalid option `ignore_last_delimiters`:', | |
'the value must be a boolean value or an integer,', | |
`got ${JSON.stringify(options.ignore_last_delimiters)}` | |
], options) | |
} | |
if(options.ignore_last_delimiters === true && options.columns === false){ | |
throw new CsvError('CSV_IGNORE_LAST_DELIMITERS_REQUIRES_COLUMNS', [ | |
'The option `ignore_last_delimiters`', | |
'requires the activation of the `columns` option' | |
], options) | |
} | |
// Normalize option `info` | |
if(options.info === undefined || options.info === null || options.info === false){ | |
options.info = false | |
}else if(options.info !== true){ | |
throw new Error(`Invalid Option: info must be true, got ${JSON.stringify(options.info)}`) | |
} | |
// Normalize option `max_record_size` | |
if(options.max_record_size === undefined || options.max_record_size === null || options.max_record_size === false){ | |
options.max_record_size = 0 | |
}else if(Number.isInteger(options.max_record_size) && options.max_record_size >= 0){ | |
// Great, nothing to do | |
}else if(typeof options.max_record_size === 'string' && /\d+/.test(options.max_record_size)){ | |
options.max_record_size = parseInt(options.max_record_size) | |
}else{ | |
throw new Error(`Invalid Option: max_record_size must be a positive integer, got ${JSON.stringify(options.max_record_size)}`) | |
} | |
// Normalize option `objname` | |
if(options.objname === undefined || options.objname === null || options.objname === false){ | |
options.objname = undefined | |
}else if(Buffer.isBuffer(options.objname)){ | |
if(options.objname.length === 0){ | |
throw new Error(`Invalid Option: objname must be a non empty buffer`) | |
} | |
if(options.encoding === null){ | |
// Don't call `toString`, leave objname as a buffer | |
}else{ | |
options.objname = options.objname.toString(options.encoding) | |
} | |
}else if(typeof options.objname === 'string'){ | |
if(options.objname.length === 0){ | |
throw new Error(`Invalid Option: objname must be a non empty string`) | |
} | |
// Great, nothing to do | |
}else{ | |
throw new Error(`Invalid Option: objname must be a string or a buffer, got ${options.objname}`) | |
} | |
// Normalize option `on_record` | |
if(options.on_record === undefined || options.on_record === null){ | |
options.on_record = undefined | |
}else if(typeof options.on_record !== 'function'){ | |
throw new CsvError('CSV_INVALID_OPTION_ON_RECORD', [ | |
'Invalid option `on_record`:', | |
'expect a function,', | |
`got ${JSON.stringify(options.on_record)}` | |
], options) | |
} | |
// Normalize option `quote` | |
if(options.quote === null || options.quote === false || options.quote === ''){ | |
options.quote = null | |
}else{ | |
if(options.quote === undefined || options.quote === true){ | |
options.quote = Buffer.from('"', options.encoding) | |
}else if(typeof options.quote === 'string'){ | |
options.quote = Buffer.from(options.quote, options.encoding) | |
} | |
if(!Buffer.isBuffer(options.quote)){ | |
throw new Error(`Invalid Option: quote must be a buffer or a string, got ${JSON.stringify(options.quote)}`) | |
} | |
} | |
// Normalize option `raw` | |
if(options.raw === undefined || options.raw === null || options.raw === false){ | |
options.raw = false | |
}else if(options.raw !== true){ | |
throw new Error(`Invalid Option: raw must be true, got ${JSON.stringify(options.raw)}`) | |
} | |
// Normalize option `record_delimiter` | |
if(!options.record_delimiter){ | |
options.record_delimiter = [] | |
}else if(!Array.isArray(options.record_delimiter)){ | |
options.record_delimiter = [options.record_delimiter] | |
} | |
options.record_delimiter = options.record_delimiter.map( function(rd){ | |
if(typeof rd === 'string'){ | |
rd = Buffer.from(rd, options.encoding) | |
} | |
return rd | |
}) | |
// Normalize option `relax` | |
if(typeof options.relax === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.relax === undefined || options.relax === null){ | |
options.relax = false | |
}else{ | |
throw new Error(`Invalid Option: relax must be a boolean, got ${JSON.stringify(options.relax)}`) | |
} | |
// Normalize option `relax_column_count` | |
if(typeof options.relax_column_count === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.relax_column_count === undefined || options.relax_column_count === null){ | |
options.relax_column_count = false | |
}else{ | |
throw new Error(`Invalid Option: relax_column_count must be a boolean, got ${JSON.stringify(options.relax_column_count)}`) | |
} | |
if(typeof options.relax_column_count_less === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.relax_column_count_less === undefined || options.relax_column_count_less === null){ | |
options.relax_column_count_less = false | |
}else{ | |
throw new Error(`Invalid Option: relax_column_count_less must be a boolean, got ${JSON.stringify(options.relax_column_count_less)}`) | |
} | |
if(typeof options.relax_column_count_more === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.relax_column_count_more === undefined || options.relax_column_count_more === null){ | |
options.relax_column_count_more = false | |
}else{ | |
throw new Error(`Invalid Option: relax_column_count_more must be a boolean, got ${JSON.stringify(options.relax_column_count_more)}`) | |
} | |
// Normalize option `skip_empty_lines` | |
if(typeof options.skip_empty_lines === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.skip_empty_lines === undefined || options.skip_empty_lines === null){ | |
options.skip_empty_lines = false | |
}else{ | |
throw new Error(`Invalid Option: skip_empty_lines must be a boolean, got ${JSON.stringify(options.skip_empty_lines)}`) | |
} | |
// Normalize option `skip_lines_with_empty_values` | |
if(typeof options.skip_lines_with_empty_values === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.skip_lines_with_empty_values === undefined || options.skip_lines_with_empty_values === null){ | |
options.skip_lines_with_empty_values = false | |
}else{ | |
throw new Error(`Invalid Option: skip_lines_with_empty_values must be a boolean, got ${JSON.stringify(options.skip_lines_with_empty_values)}`) | |
} | |
// Normalize option `skip_lines_with_error` | |
if(typeof options.skip_lines_with_error === 'boolean'){ | |
// Great, nothing to do | |
}else if(options.skip_lines_with_error === undefined || options.skip_lines_with_error === null){ | |
options.skip_lines_with_error = false | |
}else{ | |
throw new Error(`Invalid Option: skip_lines_with_error must be a boolean, got ${JSON.stringify(options.skip_lines_with_error)}`) | |
} | |
// Normalize option `rtrim` | |
if(options.rtrim === undefined || options.rtrim === null || options.rtrim === false){ | |
options.rtrim = false | |
}else if(options.rtrim !== true){ | |
throw new Error(`Invalid Option: rtrim must be a boolean, got ${JSON.stringify(options.rtrim)}`) | |
} | |
// Normalize option `ltrim` | |
if(options.ltrim === undefined || options.ltrim === null || options.ltrim === false){ | |
options.ltrim = false | |
}else if(options.ltrim !== true){ | |
throw new Error(`Invalid Option: ltrim must be a boolean, got ${JSON.stringify(options.ltrim)}`) | |
} | |
// Normalize option `trim` | |
if(options.trim === undefined || options.trim === null || options.trim === false){ | |
options.trim = false | |
}else if(options.trim !== true){ | |
throw new Error(`Invalid Option: trim must be a boolean, got ${JSON.stringify(options.trim)}`) | |
} | |
// Normalize options `trim`, `ltrim` and `rtrim` | |
if(options.trim === true && opts.ltrim !== false){ | |
options.ltrim = true | |
}else if(options.ltrim !== true){ | |
options.ltrim = false | |
} | |
if(options.trim === true && opts.rtrim !== false){ | |
options.rtrim = true | |
}else if(options.rtrim !== true){ | |
options.rtrim = false | |
} | |
// Normalize option `to` | |
if(options.to === undefined || options.to === null){ | |
options.to = -1 | |
}else{ | |
if(typeof options.to === 'string' && /\d+/.test(options.to)){ | |
options.to = parseInt(options.to) | |
} | |
if(Number.isInteger(options.to)){ | |
if(options.to <= 0){ | |
throw new Error(`Invalid Option: to must be a positive integer greater than 0, got ${JSON.stringify(opts.to)}`) | |
} | |
}else{ | |
throw new Error(`Invalid Option: to must be an integer, got ${JSON.stringify(opts.to)}`) | |
} | |
} | |
// Normalize option `to_line` | |
if(options.to_line === undefined || options.to_line === null){ | |
options.to_line = -1 | |
}else{ | |
if(typeof options.to_line === 'string' && /\d+/.test(options.to_line)){ | |
options.to_line = parseInt(options.to_line) | |
} | |
if(Number.isInteger(options.to_line)){ | |
if(options.to_line <= 0){ | |
throw new Error(`Invalid Option: to_line must be a positive integer greater than 0, got ${JSON.stringify(opts.to_line)}`) | |
} | |
}else{ | |
throw new Error(`Invalid Option: to_line must be an integer, got ${JSON.stringify(opts.to_line)}`) | |
} | |
} | |
this.info = { | |
comment_lines: 0, | |
empty_lines: 0, | |
invalid_field_length: 0, | |
lines: 1, | |
records: 0 | |
} | |
this.options = options | |
this.state = { | |
bomSkipped: false, | |
castField: fnCastField, | |
commenting: false, | |
// Current error encountered by a record | |
error: undefined, | |
enabled: options.from_line === 1, | |
escaping: false, | |
// escapeIsQuote: options.escape === options.quote, | |
escapeIsQuote: Buffer.isBuffer(options.escape) && Buffer.isBuffer(options.quote) && Buffer.compare(options.escape, options.quote) === 0, | |
// columns can be `false`, `true`, `Array` | |
expectedRecordLength: Array.isArray(options.columns) ? options.columns.length : undefined, | |
field: new ResizeableBuffer(20), | |
firstLineToHeaders: fnFirstLineToHeaders, | |
needMoreDataSize: Math.max( | |
// Skip if the remaining buffer smaller than comment | |
options.comment !== null ? options.comment.length : 0, | |
// Skip if the remaining buffer can be delimiter | |
...options.delimiter.map( (delimiter) => delimiter.length), | |
// Skip if the remaining buffer can be escape sequence | |
options.quote !== null ? options.quote.length : 0, | |
), | |
previousBuf: undefined, | |
quoting: false, | |
stop: false, | |
rawBuffer: new ResizeableBuffer(100), | |
record: [], | |
recordHasError: false, | |
record_length: 0, | |
recordDelimiterMaxLength: options.record_delimiter.length === 0 ? 2 : Math.max(...options.record_delimiter.map( (v) => v.length)), | |
trimChars: [Buffer.from(' ', options.encoding)[0], Buffer.from('\t', options.encoding)[0]], | |
wasQuoting: false, | |
wasRowDelimiter: false | |
} | |
} | |
// Implementation of `Transform._transform` | |
_transform(buf, encoding, callback){ | |
if(this.state.stop === true){ | |
return | |
} | |
const err = this.__parse(buf, false) | |
if(err !== undefined){ | |
this.state.stop = true | |
} | |
callback(err) | |
} | |
// Implementation of `Transform._flush` | |
_flush(callback){ | |
if(this.state.stop === true){ | |
return | |
} | |
const err = this.__parse(undefined, true) | |
callback(err) | |
} | |
// Central parser implementation | |
__parse(nextBuf, end){ | |
const {bom, comment, escape, from_line, ltrim, max_record_size, quote, raw, relax, rtrim, skip_empty_lines, to, to_line} = this.options | |
let {record_delimiter} = this.options | |
const {bomSkipped, previousBuf, rawBuffer, escapeIsQuote} = this.state | |
let buf | |
if(previousBuf === undefined){ | |
if(nextBuf === undefined){ | |
// Handle empty string | |
this.push(null) | |
return | |
}else{ | |
buf = nextBuf | |
} | |
}else if(previousBuf !== undefined && nextBuf === undefined){ | |
buf = previousBuf | |
}else{ | |
buf = Buffer.concat([previousBuf, nextBuf]) | |
} | |
// Handle UTF BOM | |
if(bomSkipped === false){ | |
if(bom === false){ | |
this.state.bomSkipped = true | |
}else if(buf.length < 3){ | |
// No enough data | |
if(end === false){ | |
// Wait for more data | |
this.state.previousBuf = buf | |
return | |
} | |
}else{ | |
for(let encoding in boms){ | |
if(boms[encoding].compare(buf, 0, boms[encoding].length) === 0){ | |
// Skip BOM | |
buf = buf.slice(boms[encoding].length) | |
// Renormalize original options with the new encoding | |
this.__normalizeOptions({...this.__originalOptions, encoding: encoding}) | |
break | |
} | |
} | |
this.state.bomSkipped = true | |
} | |
} | |
const bufLen = buf.length | |
let pos | |
for(pos = 0; pos < bufLen; pos++){ | |
// Ensure we get enough space to look ahead | |
// There should be a way to move this out of the loop | |
if(this.__needMoreData(pos, bufLen, end)){ | |
break | |
} | |
if(this.state.wasRowDelimiter === true){ | |
this.info.lines++ | |
this.state.wasRowDelimiter = false | |
} | |
if(to_line !== -1 && this.info.lines > to_line){ | |
this.state.stop = true | |
this.push(null) | |
return | |
} | |
// Auto discovery of record_delimiter, unix, mac and windows supported | |
if(this.state.quoting === false && record_delimiter.length === 0){ | |
const record_delimiterCount = this.__autoDiscoverRecordDelimiter(buf, pos) | |
if(record_delimiterCount){ | |
record_delimiter = this.options.record_delimiter | |
} | |
} | |
const chr = buf[pos] | |
if(raw === true){ | |
rawBuffer.append(chr) | |
} | |
if((chr === cr || chr === nl) && this.state.wasRowDelimiter === false ){ | |
this.state.wasRowDelimiter = true | |
} | |
// Previous char was a valid escape char | |
// treat the current char as a regular char | |
if(this.state.escaping === true){ | |
this.state.escaping = false | |
}else{ | |
// Escape is only active inside quoted fields | |
// We are quoting, the char is an escape chr and there is a chr to escape | |
// if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){ | |
if(escape !== null && this.state.quoting === true && this.__isEscape(buf, pos, chr) && pos + escape.length < bufLen){ | |
if(escapeIsQuote){ | |
if(this.__isQuote(buf, pos+escape.length)){ | |
this.state.escaping = true | |
pos += escape.length - 1 | |
continue | |
} | |
}else{ | |
this.state.escaping = true | |
pos += escape.length - 1 | |
continue | |
} | |
} | |
// Not currently escaping and chr is a quote | |
// TODO: need to compare bytes instead of single char | |
if(this.state.commenting === false && this.__isQuote(buf, pos)){ | |
if(this.state.quoting === true){ | |
const nextChr = buf[pos+quote.length] | |
const isNextChrTrimable = rtrim && this.__isCharTrimable(nextChr) | |
const isNextChrComment = comment !== null && this.__compareBytes(comment, buf, pos+quote.length, nextChr) | |
const isNextChrDelimiter = this.__isDelimiter(buf, pos+quote.length, nextChr) | |
const isNextChrRecordDelimiter = record_delimiter.length === 0 ? this.__autoDiscoverRecordDelimiter(buf, pos+quote.length) : this.__isRecordDelimiter(nextChr, buf, pos+quote.length) | |
// Escape a quote | |
// Treat next char as a regular character | |
if(escape !== null && this.__isEscape(buf, pos, chr) && this.__isQuote(buf, pos + escape.length)){ | |
pos += escape.length - 1 | |
}else if(!nextChr || isNextChrDelimiter || isNextChrRecordDelimiter || isNextChrComment || isNextChrTrimable){ | |
this.state.quoting = false | |
this.state.wasQuoting = true | |
pos += quote.length - 1 | |
continue | |
}else if(relax === false){ | |
const err = this.__error( | |
new CsvError('CSV_INVALID_CLOSING_QUOTE', [ | |
'Invalid Closing Quote:', | |
`got "${String.fromCharCode(nextChr)}"`, | |
`at line ${this.info.lines}`, | |
'instead of delimiter, record delimiter, trimable character', | |
'(if activated) or comment', | |
], this.options, this.__infoField()) | |
) | |
if(err !== undefined) return err | |
}else{ | |
this.state.quoting = false | |
this.state.wasQuoting = true | |
this.state.field.prepend(quote) | |
pos += quote.length - 1 | |
} | |
}else{ | |
if(this.state.field.length !== 0){ | |
// In relax mode, treat opening quote preceded by chrs as regular | |
if( relax === false ){ | |
const err = this.__error( | |
new CsvError('INVALID_OPENING_QUOTE', [ | |
'Invalid Opening Quote:', | |
`a quote is found inside a field at line ${this.info.lines}`, | |
], this.options, this.__infoField(), { | |
field: this.state.field, | |
}) | |
) | |
if(err !== undefined) return err | |
} | |
}else{ | |
this.state.quoting = true | |
pos += quote.length - 1 | |
continue | |
} | |
} | |
} | |
if(this.state.quoting === false){ | |
let recordDelimiterLength = this.__isRecordDelimiter(chr, buf, pos) | |
if(recordDelimiterLength !== 0){ | |
// Do not emit comments which take a full line | |
const skipCommentLine = this.state.commenting && (this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0) | |
if(skipCommentLine){ | |
this.info.comment_lines++ | |
// Skip full comment line | |
}else{ | |
// Activate records emition if above from_line | |
if(this.state.enabled === false && this.info.lines + (this.state.wasRowDelimiter === true ? 1: 0) >= from_line){ | |
this.state.enabled = true | |
this.__resetField() | |
this.__resetRecord() | |
pos += recordDelimiterLength - 1 | |
continue | |
} | |
// Skip if line is empty and skip_empty_lines activated | |
if(skip_empty_lines === true && this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0){ | |
this.info.empty_lines++ | |
pos += recordDelimiterLength - 1 | |
continue | |
} | |
const errField = this.__onField() | |
if(errField !== undefined) return errField | |
const errRecord = this.__onRecord() | |
if(errRecord !== undefined) return errRecord | |
if(to !== -1 && this.info.records >= to){ | |
this.state.stop = true | |
this.push(null) | |
return | |
} | |
} | |
this.state.commenting = false | |
pos += recordDelimiterLength - 1 | |
continue | |
} | |
if(this.state.commenting){ | |
continue | |
} | |
const commentCount = comment === null ? 0 : this.__compareBytes(comment, buf, pos, chr) | |
if(commentCount !== 0){ | |
this.state.commenting = true | |
continue | |
} | |
let delimiterLength = this.__isDelimiter(buf, pos, chr) | |
if(delimiterLength !== 0){ | |
const errField = this.__onField() | |
if(errField !== undefined) return errField | |
pos += delimiterLength - 1 | |
continue | |
} | |
} | |
} | |
if(this.state.commenting === false){ | |
if(max_record_size !== 0 && this.state.record_length + this.state.field.length > max_record_size){ | |
const err = this.__error( | |
new CsvError('CSV_MAX_RECORD_SIZE', [ | |
'Max Record Size:', | |
'record exceed the maximum number of tolerated bytes', | |
`of ${max_record_size}`, | |
`at line ${this.info.lines}`, | |
], this.options, this.__infoField()) | |
) | |
if(err !== undefined) return err | |
} | |
} | |
const lappend = ltrim === false || this.state.quoting === true || this.state.field.length !== 0 || !this.__isCharTrimable(chr) | |
// rtrim in non quoting is handle in __onField | |
const rappend = rtrim === false || this.state.wasQuoting === false | |
if( lappend === true && rappend === true ){ | |
this.state.field.append(chr) | |
}else if(rtrim === true && !this.__isCharTrimable(chr)){ | |
const err = this.__error( | |
new CsvError('CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE', [ | |
'Invalid Closing Quote:', | |
'found non trimable byte after quote', | |
`at line ${this.info.lines}`, | |
], this.options, this.__infoField()) | |
) | |
if(err !== undefined) return err | |
} | |
} | |
if(end === true){ | |
// Ensure we are not ending in a quoting state | |
if(this.state.quoting === true){ | |
const err = this.__error( | |
new CsvError('CSV_QUOTE_NOT_CLOSED', [ | |
'Quote Not Closed:', | |
`the parsing is finished with an opening quote at line ${this.info.lines}`, | |
], this.options, this.__infoField()) | |
) | |
if(err !== undefined) return err | |
}else{ | |
// Skip last line if it has no characters | |
if(this.state.wasQuoting === true || this.state.record.length !== 0 || this.state.field.length !== 0){ | |
const errField = this.__onField() | |
if(errField !== undefined) return errField | |
const errRecord = this.__onRecord() | |
if(errRecord !== undefined) return errRecord | |
}else if(this.state.wasRowDelimiter === true){ | |
this.info.empty_lines++ | |
}else if(this.state.commenting === true){ | |
this.info.comment_lines++ | |
} | |
} | |
}else{ | |
this.state.previousBuf = buf.slice(pos) | |
} | |
if(this.state.wasRowDelimiter === true){ | |
this.info.lines++ | |
this.state.wasRowDelimiter = false | |
} | |
} | |
__onRecord(){ | |
const {columns, columns_duplicates_to_array, encoding, info, from, relax_column_count, relax_column_count_less, relax_column_count_more, raw, skip_lines_with_empty_values} = this.options | |
const {enabled, record} = this.state | |
if(enabled === false){ | |
return this.__resetRecord() | |
} | |
// Convert the first line into column names | |
const recordLength = record.length | |
if(columns === true){ | |
if(skip_lines_with_empty_values === true && isRecordEmpty(record)){ | |
this.__resetRecord() | |
return | |
} | |
return this.__firstLineToColumns(record) | |
} | |
if(columns === false && this.info.records === 0){ | |
this.state.expectedRecordLength = recordLength | |
} | |
if(recordLength !== this.state.expectedRecordLength){ | |
const err = columns === false ? | |
// Todo: rename CSV_INCONSISTENT_RECORD_LENGTH to | |
// CSV_RECORD_INCONSISTENT_FIELDS_LENGTH | |
new CsvError('CSV_INCONSISTENT_RECORD_LENGTH', [ | |
'Invalid Record Length:', | |
`expect ${this.state.expectedRecordLength},`, | |
`got ${recordLength} on line ${this.info.lines}`, | |
], this.options, this.__infoField(), { | |
record: record, | |
}) | |
: | |
// Todo: rename CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH to | |
// CSV_RECORD_INCONSISTENT_COLUMNS | |
new CsvError('CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH', [ | |
'Invalid Record Length:', | |
`columns length is ${columns.length},`, // rename columns | |
`got ${recordLength} on line ${this.info.lines}`, | |
], this.options, this.__infoField(), { | |
record: record, | |
}) | |
if(relax_column_count === true || | |
(relax_column_count_less === true && recordLength < this.state.expectedRecordLength) || | |
(relax_column_count_more === true && recordLength > this.state.expectedRecordLength) ){ | |
this.info.invalid_field_length++ | |
this.state.error = err | |
// Error is undefined with skip_lines_with_error | |
}else{ | |
const finalErr = this.__error(err) | |
if(finalErr) return finalErr | |
} | |
} | |
if(skip_lines_with_empty_values === true && isRecordEmpty(record)){ | |
this.__resetRecord() | |
return | |
} | |
if(this.state.recordHasError === true){ | |
this.__resetRecord() | |
this.state.recordHasError = false | |
return | |
} | |
this.info.records++ | |
if(from === 1 || this.info.records >= from){ | |
// With columns, records are object | |
if(columns !== false){ | |
const obj = {} | |
// Transform record array to an object | |
for(let i = 0, l = record.length; i < l; i++){ | |
if(columns[i] === undefined || columns[i].disabled) continue | |
// Turn duplicate columns into an array | |
if (columns_duplicates_to_array === true && obj[columns[i].name] !== undefined) { | |
if (Array.isArray(obj[columns[i].name])) { | |
obj[columns[i].name] = obj[columns[i].name].concat(record[i]) | |
} else { | |
obj[columns[i].name] = [obj[columns[i].name], record[i]] | |
} | |
} else { | |
obj[columns[i].name] = record[i] | |
} | |
} | |
const {objname} = this.options | |
// Without objname (default) | |
if(objname === undefined){ | |
if(raw === true || info === true){ | |
const err = this.__push(Object.assign( | |
{record: obj}, | |
(raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {}), | |
(info === true ? {info: this.__infoRecord()}: {}) | |
)) | |
if(err){ | |
return err | |
} | |
}else{ | |
const err = this.__push(obj) | |
if(err){ | |
return err | |
} | |
} | |
// With objname (default) | |
}else{ | |
if(raw === true || info === true){ | |
const err = this.__push(Object.assign( | |
{record: [obj[objname], obj]}, | |
raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {}, | |
info === true ? {info: this.__infoRecord()}: {} | |
)) | |
if(err){ | |
return err | |
} | |
}else{ | |
const err = this.__push([obj[objname], obj]) | |
if(err){ | |
return err | |
} | |
} | |
} | |
// Without columns, records are array | |
}else{ | |
if(raw === true || info === true){ | |
const err = this.__push(Object.assign( | |
{record: record}, | |
raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {}, | |
info === true ? {info: this.__infoRecord()}: {} | |
)) | |
if(err){ | |
return err | |
} | |
}else{ | |
const err = this.__push(record) | |
if(err){ | |
return err | |
} | |
} | |
} | |
} | |
this.__resetRecord() | |
} | |
__firstLineToColumns(record){ | |
const {firstLineToHeaders} = this.state | |
try{ | |
const headers = firstLineToHeaders === undefined ? record : firstLineToHeaders.call(null, record) | |
if(!Array.isArray(headers)){ | |
return this.__error( | |
new CsvError('CSV_INVALID_COLUMN_MAPPING', [ | |
'Invalid Column Mapping:', | |
'expect an array from column function,', | |
`got ${JSON.stringify(headers)}` | |
], this.options, this.__infoField(), { | |
headers: headers, | |
}) | |
) | |
} | |
const normalizedHeaders = normalizeColumnsArray(headers) | |
this.state.expectedRecordLength = normalizedHeaders.length | |
this.options.columns = normalizedHeaders | |
this.__resetRecord() | |
return | |
}catch(err){ | |
return err | |
} | |
} | |
__resetRecord(){ | |
if(this.options.raw === true){ | |
this.state.rawBuffer.reset() | |
} | |
this.state.error = undefined | |
this.state.record = [] | |
this.state.record_length = 0 | |
} | |
__onField(){ | |
const {cast, encoding, rtrim, max_record_size} = this.options | |
const {enabled, wasQuoting} = this.state | |
// Short circuit for the from_line options | |
if(enabled === false){ | |
return this.__resetField() | |
} | |
let field = this.state.field.toString(encoding) | |
if(rtrim === true && wasQuoting === false){ | |
field = field.trimRight() | |
} | |
if(cast === true){ | |
const [err, f] = this.__cast(field) | |
if(err !== undefined) return err | |
field = f | |
} | |
this.state.record.push(field) | |
// Increment record length if record size must not exceed a limit | |
if(max_record_size !== 0 && typeof field === 'string'){ | |
this.state.record_length += field.length | |
} | |
this.__resetField() | |
} | |
__resetField(){ | |
this.state.field.reset() | |
this.state.wasQuoting = false | |
} | |
__push(record){ | |
const {on_record} = this.options | |
if(on_record !== undefined){ | |
const info = this.__infoRecord() | |
try{ | |
record = on_record.call(null, record, info) | |
}catch(err){ | |
return err | |
} | |
if(record === undefined || record === null){ return } | |
} | |
this.push(record) | |
} | |
// Return a tuple with the error and the casted value | |
__cast(field){ | |
const {columns, relax_column_count} = this.options | |
const isColumns = Array.isArray(columns) | |
// Dont loose time calling cast | |
// because the final record is an object | |
// and this field can't be associated to a key present in columns | |
if( isColumns === true && relax_column_count && this.options.columns.length <= this.state.record.length ){ | |
return [undefined, undefined] | |
} | |
if(this.state.castField !== null){ | |
try{ | |
const info = this.__infoField() | |
return [undefined, this.state.castField.call(null, field, info)] | |
}catch(err){ | |
return [err] | |
} | |
} | |
if(this.__isFloat(field)){ | |
return [undefined, parseFloat(field)] | |
}else if(this.options.cast_date !== false){ | |
const info = this.__infoField() | |
return [undefined, this.options.cast_date.call(null, field, info)] | |
} | |
return [undefined, field] | |
} | |
// Helper to test if a character is a space or a line delimiter | |
__isCharTrimable(chr){ | |
return chr === space || chr === tab || chr === cr || chr === nl || chr === np | |
} | |
// Keep it in case we implement the `cast_int` option | |
// __isInt(value){ | |
// // return Number.isInteger(parseInt(value)) | |
// // return !isNaN( parseInt( obj ) ); | |
// return /^(\-|\+)?[1-9][0-9]*$/.test(value) | |
// } | |
__isFloat(value){ | |
return (value - parseFloat( value ) + 1) >= 0 // Borrowed from jquery | |
} | |
__compareBytes(sourceBuf, targetBuf, targetPos, firstByte){ | |
if(sourceBuf[0] !== firstByte) return 0 | |
const sourceLength = sourceBuf.length | |
for(let i = 1; i < sourceLength; i++){ | |
if(sourceBuf[i] !== targetBuf[targetPos+i]) return 0 | |
} | |
return sourceLength | |
} | |
__needMoreData(i, bufLen, end){ | |
if(end) return false | |
const {quote} = this.options | |
const {quoting, needMoreDataSize, recordDelimiterMaxLength} = this.state | |
const numOfCharLeft = bufLen - i - 1 | |
const requiredLength = Math.max( | |
needMoreDataSize, | |
// Skip if the remaining buffer smaller than record delimiter | |
recordDelimiterMaxLength, | |
// Skip if the remaining buffer can be record delimiter following the closing quote | |
// 1 is for quote.length | |
quoting ? (quote.length + recordDelimiterMaxLength) : 0, | |
) | |
return numOfCharLeft < requiredLength | |
} | |
__isDelimiter(buf, pos, chr){ | |
const {delimiter, ignore_last_delimiters} = this.options | |
if(ignore_last_delimiters === true && this.state.record.length === this.options.columns.length - 1){ | |
return 0 | |
}else if(ignore_last_delimiters !== false && typeof ignore_last_delimiters === 'number' && this.state.record.length === ignore_last_delimiters - 1){ | |
return 0 | |
} | |
loop1: for(let i = 0; i < delimiter.length; i++){ | |
const del = delimiter[i] | |
if(del[0] === chr){ | |
for(let j = 1; j < del.length; j++){ | |
if(del[j] !== buf[pos+j]) continue loop1 | |
} | |
return del.length | |
} | |
} | |
return 0 | |
} | |
__isRecordDelimiter(chr, buf, pos){ | |
const {record_delimiter} = this.options | |
const recordDelimiterLength = record_delimiter.length | |
loop1: for(let i = 0; i < recordDelimiterLength; i++){ | |
const rd = record_delimiter[i] | |
const rdLength = rd.length | |
if(rd[0] !== chr){ | |
continue | |
} | |
for(let j = 1; j < rdLength; j++){ | |
if(rd[j] !== buf[pos+j]){ | |
continue loop1 | |
} | |
} | |
return rd.length | |
} | |
return 0 | |
} | |
__isEscape(buf, pos, chr){ | |
const {escape} = this.options | |
if(escape === null) return false | |
const l = escape.length | |
if(escape[0] === chr){ | |
for(let i = 0; i < l; i++){ | |
if(escape[i] !== buf[pos+i]){ | |
return false | |
} | |
} | |
return true | |
} | |
return false | |
} | |
__isQuote(buf, pos){ | |
const {quote} = this.options | |
if(quote === null) return false | |
const l = quote.length | |
for(let i = 0; i < l; i++){ | |
if(quote[i] !== buf[pos+i]){ | |
return false | |
} | |
} | |
return true | |
} | |
__autoDiscoverRecordDelimiter(buf, pos){ | |
const {encoding} = this.options | |
const chr = buf[pos] | |
if(chr === cr){ | |
if(buf[pos+1] === nl){ | |
this.options.record_delimiter.push(Buffer.from('\r\n', encoding)) | |
this.state.recordDelimiterMaxLength = 2 | |
return 2 | |
}else{ | |
this.options.record_delimiter.push(Buffer.from('\r', encoding)) | |
this.state.recordDelimiterMaxLength = 1 | |
return 1 | |
} | |
}else if(chr === nl){ | |
this.options.record_delimiter.push(Buffer.from('\n', encoding)) | |
this.state.recordDelimiterMaxLength = 1 | |
return 1 | |
} | |
return 0 | |
} | |
__error(msg){ | |
const {skip_lines_with_error} = this.options | |
const err = typeof msg === 'string' ? new Error(msg) : msg | |
if(skip_lines_with_error){ | |
this.state.recordHasError = true | |
this.emit('skip', err) | |
return undefined | |
}else{ | |
return err | |
} | |
} | |
__infoDataSet(){ | |
return { | |
...this.info, | |
columns: this.options.columns | |
} | |
} | |
__infoRecord(){ | |
const {columns} = this.options | |
return { | |
...this.__infoDataSet(), | |
error: this.state.error, | |
header: columns === true, | |
index: this.state.record.length, | |
} | |
} | |
__infoField(){ | |
const {columns} = this.options | |
const isColumns = Array.isArray(columns) | |
return { | |
...this.__infoRecord(), | |
column: isColumns === true ? | |
( columns.length > this.state.record.length ? | |
columns[this.state.record.length].name : | |
null | |
) : | |
this.state.record.length, | |
quoting: this.state.wasQuoting, | |
} | |
} | |
} | |
const parse = function(){ | |
let data, options, callback | |
for(let i in arguments){ | |
const argument = arguments[i] | |
const type = typeof argument | |
if(data === undefined && (typeof argument === 'string' || Buffer.isBuffer(argument))){ | |
data = argument | |
}else if(options === undefined && isObject(argument)){ | |
options = argument | |
}else if(callback === undefined && type === 'function'){ | |
callback = argument | |
}else{ | |
throw new CsvError('CSV_INVALID_ARGUMENT', [ | |
'Invalid argument:', | |
`got ${JSON.stringify(argument)} at index ${i}` | |
], this.options) | |
} | |
} | |
const parser = new Parser(options) | |
if(callback){ | |
const records = options === undefined || options.objname === undefined ? [] : {} | |
parser.on('readable', function(){ | |
let record | |
while((record = this.read()) !== null){ | |
if(options === undefined || options.objname === undefined){ | |
records.push(record) | |
}else{ | |
records[record[0]] = record[1] | |
} | |
} | |
}) | |
parser.on('error', function(err){ | |
callback(err, undefined, parser.__infoDataSet()) | |
}) | |
parser.on('end', function(){ | |
callback(undefined, records, parser.__infoDataSet()) | |
}) | |
} | |
if(data !== undefined){ | |
// Give a chance for events to be registered later | |
if(typeof setImmediate === 'function'){ | |
setImmediate(function(){ | |
parser.write(data) | |
parser.end() | |
}) | |
}else{ | |
parser.write(data) | |
parser.end() | |
} | |
} | |
return parser | |
} | |
class CsvError extends Error { | |
constructor(code, message, options, ...contexts) { | |
if(Array.isArray(message)) message = message.join(' ') | |
super(message) | |
if(Error.captureStackTrace !== undefined){ | |
Error.captureStackTrace(this, CsvError) | |
} | |
this.code = code | |
for(const context of contexts){ | |
for(const key in context){ | |
const value = context[key] | |
this[key] = Buffer.isBuffer(value) ? value.toString(options.encoding) : value == null ? value : JSON.parse(JSON.stringify(value)) | |
} | |
} | |
} | |
} | |
parse.Parser = Parser | |
parse.CsvError = CsvError | |
module.exports = parse | |
const underscore = function(str){ | |
return str.replace(/([A-Z])/g, function(_, match){ | |
return '_' + match.toLowerCase() | |
}) | |
} | |
const isObject = function(obj){ | |
return (typeof obj === 'object' && obj !== null && !Array.isArray(obj)) | |
} | |
const isRecordEmpty = function(record){ | |
return record.every( (field) => field == null || field.toString && field.toString().trim() === '' ) | |
} | |
const normalizeColumnsArray = function(columns){ | |
const normalizedColumns = []; | |
for(let i = 0, l = columns.length; i < l; i++){ | |
const column = columns[i] | |
if(column === undefined || column === null || column === false){ | |
normalizedColumns[i] = { disabled: true } | |
}else if(typeof column === 'string'){ | |
normalizedColumns[i] = { name: column } | |
}else if(isObject(column)){ | |
if(typeof column.name !== 'string'){ | |
throw new CsvError('CSV_OPTION_COLUMNS_MISSING_NAME', [ | |
'Option columns missing name:', | |
`property "name" is required at position ${i}`, | |
'when column is an object literal' | |
]) | |
} | |
normalizedColumns[i] = column | |
}else{ | |
throw new CsvError('CSV_INVALID_COLUMN_DEFINITION', [ | |
'Invalid column definition:', | |
'expect a string or a literal object,', | |
`got ${JSON.stringify(column)} at position ${i}` | |
]) | |
} | |
} | |
return normalizedColumns; | |
} | |
/***/ }), | |
/***/ 8750: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const parse = __nccwpck_require__(2830) | |
module.exports = function(data, options={}){ | |
if(typeof data === 'string'){ | |
data = Buffer.from(data) | |
} | |
const records = options && options.objname ? {} : [] | |
const parser = new parse.Parser(options) | |
parser.push = function(record){ | |
if(record === null){ | |
return | |
} | |
if(options.objname === undefined) | |
records.push(record) | |
else{ | |
records[record[0]] = record[1] | |
} | |
} | |
const err1 = parser.__parse(data, false) | |
if(err1 !== undefined) throw err1 | |
const err2 = parser.__parse(undefined, true) | |
if(err2 !== undefined) throw err2 | |
return records | |
} | |
/***/ }), | |
/***/ 8932: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
class Deprecation extends Error { | |
constructor(message) { | |
super(message); // Maintains proper stack trace (only available on V8) | |
/* istanbul ignore next */ | |
if (Error.captureStackTrace) { | |
Error.captureStackTrace(this, this.constructor); | |
} | |
this.name = 'Deprecation'; | |
} | |
} | |
exports.Deprecation = Deprecation; | |
/***/ }), | |
/***/ 6863: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
module.exports = realpath | |
realpath.realpath = realpath | |
realpath.sync = realpathSync | |
realpath.realpathSync = realpathSync | |
realpath.monkeypatch = monkeypatch | |
realpath.unmonkeypatch = unmonkeypatch | |
var fs = __nccwpck_require__(5747) | |
var origRealpath = fs.realpath | |
var origRealpathSync = fs.realpathSync | |
var version = process.version | |
var ok = /^v[0-5]\./.test(version) | |
var old = __nccwpck_require__(1734) | |
function newError (er) { | |
return er && er.syscall === 'realpath' && ( | |
er.code === 'ELOOP' || | |
er.code === 'ENOMEM' || | |
er.code === 'ENAMETOOLONG' | |
) | |
} | |
function realpath (p, cache, cb) { | |
if (ok) { | |
return origRealpath(p, cache, cb) | |
} | |
if (typeof cache === 'function') { | |
cb = cache | |
cache = null | |
} | |
origRealpath(p, cache, function (er, result) { | |
if (newError(er)) { | |
old.realpath(p, cache, cb) | |
} else { | |
cb(er, result) | |
} | |
}) | |
} | |
function realpathSync (p, cache) { | |
if (ok) { | |
return origRealpathSync(p, cache) | |
} | |
try { | |
return origRealpathSync(p, cache) | |
} catch (er) { | |
if (newError(er)) { | |
return old.realpathSync(p, cache) | |
} else { | |
throw er | |
} | |
} | |
} | |
function monkeypatch () { | |
fs.realpath = realpath | |
fs.realpathSync = realpathSync | |
} | |
function unmonkeypatch () { | |
fs.realpath = origRealpath | |
fs.realpathSync = origRealpathSync | |
} | |
/***/ }), | |
/***/ 1734: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
var pathModule = __nccwpck_require__(5622); | |
var isWindows = process.platform === 'win32'; | |
var fs = __nccwpck_require__(5747); | |
// JavaScript implementation of realpath, ported from node pre-v6 | |
var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); | |
function rethrow() { | |
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and | |
// is fairly slow to generate. | |
var callback; | |
if (DEBUG) { | |
var backtrace = new Error; | |
callback = debugCallback; | |
} else | |
callback = missingCallback; | |
return callback; | |
function debugCallback(err) { | |
if (err) { | |
backtrace.message = err.message; | |
err = backtrace; | |
missingCallback(err); | |
} | |
} | |
function missingCallback(err) { | |
if (err) { | |
if (process.throwDeprecation) | |
throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs | |
else if (!process.noDeprecation) { | |
var msg = 'fs: missing callback ' + (err.stack || err.message); | |
if (process.traceDeprecation) | |
console.trace(msg); | |
else | |
console.error(msg); | |
} | |
} | |
} | |
} | |
function maybeCallback(cb) { | |
return typeof cb === 'function' ? cb : rethrow(); | |
} | |
var normalize = pathModule.normalize; | |
// Regexp that finds the next partion of a (partial) path | |
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] | |
if (isWindows) { | |
var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; | |
} else { | |
var nextPartRe = /(.*?)(?:[\/]+|$)/g; | |
} | |
// Regex to find the device root, including trailing slash. E.g. 'c:\\'. | |
if (isWindows) { | |
var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; | |
} else { | |
var splitRootRe = /^[\/]*/; | |
} | |
exports.realpathSync = function realpathSync(p, cache) { | |
// make p is absolute | |
p = pathModule.resolve(p); | |
if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { | |
return cache[p]; | |
} | |
var original = p, | |
seenLinks = {}, | |
knownHard = {}; | |
// current character position in p | |
var pos; | |
// the partial path so far, including a trailing slash if any | |
var current; | |
// the partial path without a trailing slash (except when pointing at a root) | |
var base; | |
// the partial path scanned in the previous round, with slash | |
var previous; | |
start(); | |
function start() { | |
// Skip over roots | |
var m = splitRootRe.exec(p); | |
pos = m[0].length; | |
current = m[0]; | |
base = m[0]; | |
previous = ''; | |
// On windows, check that the root exists. On unix there is no need. | |
if (isWindows && !knownHard[base]) { | |
fs.lstatSync(base); | |
knownHard[base] = true; | |
} | |
} | |
// walk down the path, swapping out linked pathparts for their real | |
// values | |
// NB: p.length changes. | |
while (pos < p.length) { | |
// find the next part | |
nextPartRe.lastIndex = pos; | |
var result = nextPartRe.exec(p); | |
previous = current; | |
current += result[0]; | |
base = previous + result[1]; | |
pos = nextPartRe.lastIndex; | |
// continue if not a symlink | |
if (knownHard[base] || (cache && cache[base] === base)) { | |
continue; | |
} | |
var resolvedLink; | |
if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { | |
// some known symbolic link. no need to stat again. | |
resolvedLink = cache[base]; | |
} else { | |
var stat = fs.lstatSync(base); | |
if (!stat.isSymbolicLink()) { | |
knownHard[base] = true; | |
if (cache) cache[base] = base; | |
continue; | |
} | |
// read the link if it wasn't read before | |
// dev/ino always return 0 on windows, so skip the check. | |
var linkTarget = null; | |
if (!isWindows) { | |
var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); | |
if (seenLinks.hasOwnProperty(id)) { | |
linkTarget = seenLinks[id]; | |
} | |
} | |
if (linkTarget === null) { | |
fs.statSync(base); | |
linkTarget = fs.readlinkSync(base); | |
} | |
resolvedLink = pathModule.resolve(previous, linkTarget); | |
// track this, if given a cache. | |
if (cache) cache[base] = resolvedLink; | |
if (!isWindows) seenLinks[id] = linkTarget; | |
} | |
// resolve the link, then start over | |
p = pathModule.resolve(resolvedLink, p.slice(pos)); | |
start(); | |
} | |
if (cache) cache[original] = p; | |
return p; | |
}; | |
exports.realpath = function realpath(p, cache, cb) { | |
if (typeof cb !== 'function') { | |
cb = maybeCallback(cache); | |
cache = null; | |
} | |
// make p is absolute | |
p = pathModule.resolve(p); | |
if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { | |
return process.nextTick(cb.bind(null, null, cache[p])); | |
} | |
var original = p, | |
seenLinks = {}, | |
knownHard = {}; | |
// current character position in p | |
var pos; | |
// the partial path so far, including a trailing slash if any | |
var current; | |
// the partial path without a trailing slash (except when pointing at a root) | |
var base; | |
// the partial path scanned in the previous round, with slash | |
var previous; | |
start(); | |
function start() { | |
// Skip over roots | |
var m = splitRootRe.exec(p); | |
pos = m[0].length; | |
current = m[0]; | |
base = m[0]; | |
previous = ''; | |
// On windows, check that the root exists. On unix there is no need. | |
if (isWindows && !knownHard[base]) { | |
fs.lstat(base, function(err) { | |
if (err) return cb(err); | |
knownHard[base] = true; | |
LOOP(); | |
}); | |
} else { | |
process.nextTick(LOOP); | |
} | |
} | |
// walk down the path, swapping out linked pathparts for their real | |
// values | |
function LOOP() { | |
// stop if scanned past end of path | |
if (pos >= p.length) { | |
if (cache) cache[original] = p; | |
return cb(null, p); | |
} | |
// find the next part | |
nextPartRe.lastIndex = pos; | |
var result = nextPartRe.exec(p); | |
previous = current; | |
current += result[0]; | |
base = previous + result[1]; | |
pos = nextPartRe.lastIndex; | |
// continue if not a symlink | |
if (knownHard[base] || (cache && cache[base] === base)) { | |
return process.nextTick(LOOP); | |
} | |
if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { | |
// known symbolic link. no need to stat again. | |
return gotResolvedLink(cache[base]); | |
} | |
return fs.lstat(base, gotStat); | |
} | |
function gotStat(err, stat) { | |
if (err) return cb(err); | |
// if not a symlink, skip to the next path part | |
if (!stat.isSymbolicLink()) { | |
knownHard[base] = true; | |
if (cache) cache[base] = base; | |
return process.nextTick(LOOP); | |
} | |
// stat & read the link if not read before | |
// call gotTarget as soon as the link target is known | |
// dev/ino always return 0 on windows, so skip the check. | |
if (!isWindows) { | |
var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); | |
if (seenLinks.hasOwnProperty(id)) { | |
return gotTarget(null, seenLinks[id], base); | |
} | |
} | |
fs.stat(base, function(err) { | |
if (err) return cb(err); | |
fs.readlink(base, function(err, target) { | |
if (!isWindows) seenLinks[id] = target; | |
gotTarget(err, target); | |
}); | |
}); | |
} | |
function gotTarget(err, target, base) { | |
if (err) return cb(err); | |
var resolvedLink = pathModule.resolve(previous, target); | |
if (cache) cache[base] = resolvedLink; | |
gotResolvedLink(resolvedLink); | |
} | |
function gotResolvedLink(resolvedLink) { | |
// resolve the link, then start over | |
p = pathModule.resolve(resolvedLink, p.slice(pos)); | |
start(); | |
} | |
}; | |
/***/ }), | |
/***/ 7625: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
exports.setopts = setopts | |
exports.ownProp = ownProp | |
exports.makeAbs = makeAbs | |
exports.finish = finish | |
exports.mark = mark | |
exports.isIgnored = isIgnored | |
exports.childrenIgnored = childrenIgnored | |
function ownProp (obj, field) { | |
return Object.prototype.hasOwnProperty.call(obj, field) | |
} | |
var path = __nccwpck_require__(5622) | |
var minimatch = __nccwpck_require__(3973) | |
var isAbsolute = __nccwpck_require__(8714) | |
var Minimatch = minimatch.Minimatch | |
function alphasort (a, b) { | |
return a.localeCompare(b, 'en') | |
} | |
function setupIgnores (self, options) { | |
self.ignore = options.ignore || [] | |
if (!Array.isArray(self.ignore)) | |
self.ignore = [self.ignore] | |
if (self.ignore.length) { | |
self.ignore = self.ignore.map(ignoreMap) | |
} | |
} | |
// ignore patterns are always in dot:true mode. | |
function ignoreMap (pattern) { | |
var gmatcher = null | |
if (pattern.slice(-3) === '/**') { | |
var gpattern = pattern.replace(/(\/\*\*)+$/, '') | |
gmatcher = new Minimatch(gpattern, { dot: true }) | |
} | |
return { | |
matcher: new Minimatch(pattern, { dot: true }), | |
gmatcher: gmatcher | |
} | |
} | |
function setopts (self, pattern, options) { | |
if (!options) | |
options = {} | |
// base-matching: just use globstar for that. | |
if (options.matchBase && -1 === pattern.indexOf("/")) { | |
if (options.noglobstar) { | |
throw new Error("base matching requires globstar") | |
} | |
pattern = "**/" + pattern | |
} | |
self.silent = !!options.silent | |
self.pattern = pattern | |
self.strict = options.strict !== false | |
self.realpath = !!options.realpath | |
self.realpathCache = options.realpathCache || Object.create(null) | |
self.follow = !!options.follow | |
self.dot = !!options.dot | |
self.mark = !!options.mark | |
self.nodir = !!options.nodir | |
if (self.nodir) | |
self.mark = true | |
self.sync = !!options.sync | |
self.nounique = !!options.nounique | |
self.nonull = !!options.nonull | |
self.nosort = !!options.nosort | |
self.nocase = !!options.nocase | |
self.stat = !!options.stat | |
self.noprocess = !!options.noprocess | |
self.absolute = !!options.absolute | |
self.maxLength = options.maxLength || Infinity | |
self.cache = options.cache || Object.create(null) | |
self.statCache = options.statCache || Object.create(null) | |
self.symlinks = options.symlinks || Object.create(null) | |
setupIgnores(self, options) | |
self.changedCwd = false | |
var cwd = process.cwd() | |
if (!ownProp(options, "cwd")) | |
self.cwd = cwd | |
else { | |
self.cwd = path.resolve(options.cwd) | |
self.changedCwd = self.cwd !== cwd | |
} | |
self.root = options.root || path.resolve(self.cwd, "/") | |
self.root = path.resolve(self.root) | |
if (process.platform === "win32") | |
self.root = self.root.replace(/\\/g, "/") | |
// TODO: is an absolute `cwd` supposed to be resolved against `root`? | |
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') | |
self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) | |
if (process.platform === "win32") | |
self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") | |
self.nomount = !!options.nomount | |
// disable comments and negation in Minimatch. | |
// Note that they are not supported in Glob itself anyway. | |
options.nonegate = true | |
options.nocomment = true | |
self.minimatch = new Minimatch(pattern, options) | |
self.options = self.minimatch.options | |
} | |
function finish (self) { | |
var nou = self.nounique | |
var all = nou ? [] : Object.create(null) | |
for (var i = 0, l = self.matches.length; i < l; i ++) { | |
var matches = self.matches[i] | |
if (!matches || Object.keys(matches).length === 0) { | |
if (self.nonull) { | |
// do like the shell, and spit out the literal glob | |
var literal = self.minimatch.globSet[i] | |
if (nou) | |
all.push(literal) | |
else | |
all[literal] = true | |
} | |
} else { | |
// had matches | |
var m = Object.keys(matches) | |
if (nou) | |
all.push.apply(all, m) | |
else | |
m.forEach(function (m) { | |
all[m] = true | |
}) | |
} | |
} | |
if (!nou) | |
all = Object.keys(all) | |
if (!self.nosort) | |
all = all.sort(alphasort) | |
// at *some* point we statted all of these | |
if (self.mark) { | |
for (var i = 0; i < all.length; i++) { | |
all[i] = self._mark(all[i]) | |
} | |
if (self.nodir) { | |
all = all.filter(function (e) { | |
var notDir = !(/\/$/.test(e)) | |
var c = self.cache[e] || self.cache[makeAbs(self, e)] | |
if (notDir && c) | |
notDir = c !== 'DIR' && !Array.isArray(c) | |
return notDir | |
}) | |
} | |
} | |
if (self.ignore.length) | |
all = all.filter(function(m) { | |
return !isIgnored(self, m) | |
}) | |
self.found = all | |
} | |
function mark (self, p) { | |
var abs = makeAbs(self, p) | |
var c = self.cache[abs] | |
var m = p | |
if (c) { | |
var isDir = c === 'DIR' || Array.isArray(c) | |
var slash = p.slice(-1) === '/' | |
if (isDir && !slash) | |
m += '/' | |
else if (!isDir && slash) | |
m = m.slice(0, -1) | |
if (m !== p) { | |
var mabs = makeAbs(self, m) | |
self.statCache[mabs] = self.statCache[abs] | |
self.cache[mabs] = self.cache[abs] | |
} | |
} | |
return m | |
} | |
// lotta situps... | |
function makeAbs (self, f) { | |
var abs = f | |
if (f.charAt(0) === '/') { | |
abs = path.join(self.root, f) | |
} else if (isAbsolute(f) || f === '') { | |
abs = f | |
} else if (self.changedCwd) { | |
abs = path.resolve(self.cwd, f) | |
} else { | |
abs = path.resolve(f) | |
} | |
if (process.platform === 'win32') | |
abs = abs.replace(/\\/g, '/') | |
return abs | |
} | |
// Return true, if pattern ends with globstar '**', for the accompanying parent directory. | |
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents | |
function isIgnored (self, path) { | |
if (!self.ignore.length) | |
return false | |
return self.ignore.some(function(item) { | |
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) | |
}) | |
} | |
function childrenIgnored (self, path) { | |
if (!self.ignore.length) | |
return false | |
return self.ignore.some(function(item) { | |
return !!(item.gmatcher && item.gmatcher.match(path)) | |
}) | |
} | |
/***/ }), | |
/***/ 1957: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
// Approach: | |
// | |
// 1. Get the minimatch set | |
// 2. For each pattern in the set, PROCESS(pattern, false) | |
// 3. Store matches per-set, then uniq them | |
// | |
// PROCESS(pattern, inGlobStar) | |
// Get the first [n] items from pattern that are all strings | |
// Join these together. This is PREFIX. | |
// If there is no more remaining, then stat(PREFIX) and | |
// add to matches if it succeeds. END. | |
// | |
// If inGlobStar and PREFIX is symlink and points to dir | |
// set ENTRIES = [] | |
// else readdir(PREFIX) as ENTRIES | |
// If fail, END | |
// | |
// with ENTRIES | |
// If pattern[n] is GLOBSTAR | |
// // handle the case where the globstar match is empty | |
// // by pruning it out, and testing the resulting pattern | |
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) | |
// // handle other cases. | |
// for ENTRY in ENTRIES (not dotfiles) | |
// // attach globstar + tail onto the entry | |
// // Mark that this entry is a globstar match | |
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) | |
// | |
// else // not globstar | |
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) | |
// Test ENTRY against pattern[n] | |
// If fails, continue | |
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) | |
// | |
// Caveat: | |
// Cache all stats and readdirs results to minimize syscall. Since all | |
// we ever care about is existence and directory-ness, we can just keep | |
// `true` for files, and [children,...] for directories, or `false` for | |
// things that don't exist. | |
module.exports = glob | |
var fs = __nccwpck_require__(5747) | |
var rp = __nccwpck_require__(6863) | |
var minimatch = __nccwpck_require__(3973) | |
var Minimatch = minimatch.Minimatch | |
var inherits = __nccwpck_require__(4124) | |
var EE = __nccwpck_require__(8614).EventEmitter | |
var path = __nccwpck_require__(5622) | |
var assert = __nccwpck_require__(2357) | |
var isAbsolute = __nccwpck_require__(8714) | |
var globSync = __nccwpck_require__(9010) | |
var common = __nccwpck_require__(7625) | |
var setopts = common.setopts | |
var ownProp = common.ownProp | |
var inflight = __nccwpck_require__(2492) | |
var util = __nccwpck_require__(1669) | |
var childrenIgnored = common.childrenIgnored | |
var isIgnored = common.isIgnored | |
var once = __nccwpck_require__(1223) | |
function glob (pattern, options, cb) { | |
if (typeof options === 'function') cb = options, options = {} | |
if (!options) options = {} | |
if (options.sync) { | |
if (cb) | |
throw new TypeError('callback provided to sync glob') | |
return globSync(pattern, options) | |
} | |
return new Glob(pattern, options, cb) | |
} | |
glob.sync = globSync | |
var GlobSync = glob.GlobSync = globSync.GlobSync | |
// old api surface | |
glob.glob = glob | |
function extend (origin, add) { | |
if (add === null || typeof add !== 'object') { | |
return origin | |
} | |
var keys = Object.keys(add) | |
var i = keys.length | |
while (i--) { | |
origin[keys[i]] = add[keys[i]] | |
} | |
return origin | |
} | |
glob.hasMagic = function (pattern, options_) { | |
var options = extend({}, options_) | |
options.noprocess = true | |
var g = new Glob(pattern, options) | |
var set = g.minimatch.set | |
if (!pattern) | |
return false | |
if (set.length > 1) | |
return true | |
for (var j = 0; j < set[0].length; j++) { | |
if (typeof set[0][j] !== 'string') | |
return true | |
} | |
return false | |
} | |
glob.Glob = Glob | |
inherits(Glob, EE) | |
function Glob (pattern, options, cb) { | |
if (typeof options === 'function') { | |
cb = options | |
options = null | |
} | |
if (options && options.sync) { | |
if (cb) | |
throw new TypeError('callback provided to sync glob') | |
return new GlobSync(pattern, options) | |
} | |
if (!(this instanceof Glob)) | |
return new Glob(pattern, options, cb) | |
setopts(this, pattern, options) | |
this._didRealPath = false | |
// process each pattern in the minimatch set | |
var n = this.minimatch.set.length | |
// The matches are stored as {<filename>: true,...} so that | |
// duplicates are automagically pruned. | |
// Later, we do an Object.keys() on these. | |
// Keep them as a list so we can fill in when nonull is set. | |
this.matches = new Array(n) | |
if (typeof cb === 'function') { | |
cb = once(cb) | |
this.on('error', cb) | |
this.on('end', function (matches) { | |
cb(null, matches) | |
}) | |
} | |
var self = this | |
this._processing = 0 | |
this._emitQueue = [] | |
this._processQueue = [] | |
this.paused = false | |
if (this.noprocess) | |
return this | |
if (n === 0) | |
return done() | |
var sync = true | |
for (var i = 0; i < n; i ++) { | |
this._process(this.minimatch.set[i], i, false, done) | |
} | |
sync = false | |
function done () { | |
--self._processing | |
if (self._processing <= 0) { | |
if (sync) { | |
process.nextTick(function () { | |
self._finish() | |
}) | |
} else { | |
self._finish() | |
} | |
} | |
} | |
} | |
Glob.prototype._finish = function () { | |
assert(this instanceof Glob) | |
if (this.aborted) | |
return | |
if (this.realpath && !this._didRealpath) | |
return this._realpath() | |
common.finish(this) | |
this.emit('end', this.found) | |
} | |
Glob.prototype._realpath = function () { | |
if (this._didRealpath) | |
return | |
this._didRealpath = true | |
var n = this.matches.length | |
if (n === 0) | |
return this._finish() | |
var self = this | |
for (var i = 0; i < this.matches.length; i++) | |
this._realpathSet(i, next) | |
function next () { | |
if (--n === 0) | |
self._finish() | |
} | |
} | |
Glob.prototype._realpathSet = function (index, cb) { | |
var matchset = this.matches[index] | |
if (!matchset) | |
return cb() | |
var found = Object.keys(matchset) | |
var self = this | |
var n = found.length | |
if (n === 0) | |
return cb() | |
var set = this.matches[index] = Object.create(null) | |
found.forEach(function (p, i) { | |
// If there's a problem with the stat, then it means that | |
// one or more of the links in the realpath couldn't be | |
// resolved. just return the abs value in that case. | |
p = self._makeAbs(p) | |
rp.realpath(p, self.realpathCache, function (er, real) { | |
if (!er) | |
set[real] = true | |
else if (er.syscall === 'stat') | |
set[p] = true | |
else | |
self.emit('error', er) // srsly wtf right here | |
if (--n === 0) { | |
self.matches[index] = set | |
cb() | |
} | |
}) | |
}) | |
} | |
Glob.prototype._mark = function (p) { | |
return common.mark(this, p) | |
} | |
Glob.prototype._makeAbs = function (f) { | |
return common.makeAbs(this, f) | |
} | |
Glob.prototype.abort = function () { | |
this.aborted = true | |
this.emit('abort') | |
} | |
Glob.prototype.pause = function () { | |
if (!this.paused) { | |
this.paused = true | |
this.emit('pause') | |
} | |
} | |
Glob.prototype.resume = function () { | |
if (this.paused) { | |
this.emit('resume') | |
this.paused = false | |
if (this._emitQueue.length) { | |
var eq = this._emitQueue.slice(0) | |
this._emitQueue.length = 0 | |
for (var i = 0; i < eq.length; i ++) { | |
var e = eq[i] | |
this._emitMatch(e[0], e[1]) | |
} | |
} | |
if (this._processQueue.length) { | |
var pq = this._processQueue.slice(0) | |
this._processQueue.length = 0 | |
for (var i = 0; i < pq.length; i ++) { | |
var p = pq[i] | |
this._processing-- | |
this._process(p[0], p[1], p[2], p[3]) | |
} | |
} | |
} | |
} | |
Glob.prototype._process = function (pattern, index, inGlobStar, cb) { | |
assert(this instanceof Glob) | |
assert(typeof cb === 'function') | |
if (this.aborted) | |
return | |
this._processing++ | |
if (this.paused) { | |
this._processQueue.push([pattern, index, inGlobStar, cb]) | |
return | |
} | |
//console.error('PROCESS %d', this._processing, pattern) | |
// Get the first [n] parts of pattern that are all strings. | |
var n = 0 | |
while (typeof pattern[n] === 'string') { | |
n ++ | |
} | |
// now n is the index of the first one that is *not* a string. | |
// see if there's anything else | |
var prefix | |
switch (n) { | |
// if not, then this is rather simple | |
case pattern.length: | |
this._processSimple(pattern.join('/'), index, cb) | |
return | |
case 0: | |
// pattern *starts* with some non-trivial item. | |
// going to readdir(cwd), but not include the prefix in matches. | |
prefix = null | |
break | |
default: | |
// pattern has some string bits in the front. | |
// whatever it starts with, whether that's 'absolute' like /foo/bar, | |
// or 'relative' like '../baz' | |
prefix = pattern.slice(0, n).join('/') | |
break | |
} | |
var remain = pattern.slice(n) | |
// get the list of entries. | |
var read | |
if (prefix === null) | |
read = '.' | |
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { | |
if (!prefix || !isAbsolute(prefix)) | |
prefix = '/' + prefix | |
read = prefix | |
} else | |
read = prefix | |
var abs = this._makeAbs(read) | |
//if ignored, skip _processing | |
if (childrenIgnored(this, read)) | |
return cb() | |
var isGlobStar = remain[0] === minimatch.GLOBSTAR | |
if (isGlobStar) | |
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) | |
else | |
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) | |
} | |
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { | |
var self = this | |
this._readdir(abs, inGlobStar, function (er, entries) { | |
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) | |
}) | |
} | |
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { | |
// if the abs isn't a dir, then nothing can match! | |
if (!entries) | |
return cb() | |
// It will only match dot entries if it starts with a dot, or if | |
// dot is set. Stuff like @(.foo|.bar) isn't allowed. | |
var pn = remain[0] | |
var negate = !!this.minimatch.negate | |
var rawGlob = pn._glob | |
var dotOk = this.dot || rawGlob.charAt(0) === '.' | |
var matchedEntries = [] | |
for (var i = 0; i < entries.length; i++) { | |
var e = entries[i] | |
if (e.charAt(0) !== '.' || dotOk) { | |
var m | |
if (negate && !prefix) { | |
m = !e.match(pn) | |
} else { | |
m = e.match(pn) | |
} | |
if (m) | |
matchedEntries.push(e) | |
} | |
} | |
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) | |
var len = matchedEntries.length | |
// If there are no matched entries, then nothing matches. | |
if (len === 0) | |
return cb() | |
// if this is the last remaining pattern bit, then no need for | |
// an additional stat *unless* the user has specified mark or | |
// stat explicitly. We know they exist, since readdir returned | |
// them. | |
if (remain.length === 1 && !this.mark && !this.stat) { | |
if (!this.matches[index]) | |
this.matches[index] = Object.create(null) | |
for (var i = 0; i < len; i ++) { | |
var e = matchedEntries[i] | |
if (prefix) { | |
if (prefix !== '/') | |
e = prefix + '/' + e | |
else | |
e = prefix + e | |
} | |
if (e.charAt(0) === '/' && !this.nomount) { | |
e = path.join(this.root, e) | |
} | |
this._emitMatch(index, e) | |
} | |
// This was the last one, and no stats were needed | |
return cb() | |
} | |
// now test all matched entries as stand-ins for that part | |
// of the pattern. | |
remain.shift() | |
for (var i = 0; i < len; i ++) { | |
var e = matchedEntries[i] | |
var newPattern | |
if (prefix) { | |
if (prefix !== '/') | |
e = prefix + '/' + e | |
else | |
e = prefix + e | |
} | |
this._process([e].concat(remain), index, inGlobStar, cb) | |
} | |
cb() | |
} | |
Glob.prototype._emitMatch = function (index, e) { | |
if (this.aborted) | |
return | |
if (isIgnored(this, e)) | |
return | |
if (this.paused) { | |
this._emitQueue.push([index, e]) | |
return | |
} | |
var abs = isAbsolute(e) ? e : this._makeAbs(e) | |
if (this.mark) | |
e = this._mark(e) | |
if (this.absolute) | |
e = abs | |
if (this.matches[index][e]) | |
return | |
if (this.nodir) { | |
var c = this.cache[abs] | |
if (c === 'DIR' || Array.isArray(c)) | |
return | |
} | |
this.matches[index][e] = true | |
var st = this.statCache[abs] | |
if (st) | |
this.emit('stat', e, st) | |
this.emit('match', e) | |
} | |
Glob.prototype._readdirInGlobStar = function (abs, cb) { | |
if (this.aborted) | |
return | |
// follow all symlinked directories forever | |
// just proceed as if this is a non-globstar situation | |
if (this.follow) | |
return this._readdir(abs, false, cb) | |
var lstatkey = 'lstat\0' + abs | |
var self = this | |
var lstatcb = inflight(lstatkey, lstatcb_) | |
if (lstatcb) | |
fs.lstat(abs, lstatcb) | |
function lstatcb_ (er, lstat) { | |
if (er && er.code === 'ENOENT') | |
return cb() | |
var isSym = lstat && lstat.isSymbolicLink() | |
self.symlinks[abs] = isSym | |
// If it's not a symlink or a dir, then it's definitely a regular file. | |
// don't bother doing a readdir in that case. | |
if (!isSym && lstat && !lstat.isDirectory()) { | |
self.cache[abs] = 'FILE' | |
cb() | |
} else | |
self._readdir(abs, false, cb) | |
} | |
} | |
Glob.prototype._readdir = function (abs, inGlobStar, cb) { | |
if (this.aborted) | |
return | |
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) | |
if (!cb) | |
return | |
//console.error('RD %j %j', +inGlobStar, abs) | |
if (inGlobStar && !ownProp(this.symlinks, abs)) | |
return this._readdirInGlobStar(abs, cb) | |
if (ownProp(this.cache, abs)) { | |
var c = this.cache[abs] | |
if (!c || c === 'FILE') | |
return cb() | |
if (Array.isArray(c)) | |
return cb(null, c) | |
} | |
var self = this | |
fs.readdir(abs, readdirCb(this, abs, cb)) | |
} | |
function readdirCb (self, abs, cb) { | |
return function (er, entries) { | |
if (er) | |
self._readdirError(abs, er, cb) | |
else | |
self._readdirEntries(abs, entries, cb) | |
} | |
} | |
Glob.prototype._readdirEntries = function (abs, entries, cb) { | |
if (this.aborted) | |
return | |
// if we haven't asked to stat everything, then just | |
// assume that everything in there exists, so we can avoid | |
// having to stat it a second time. | |
if (!this.mark && !this.stat) { | |
for (var i = 0; i < entries.length; i ++) { | |
var e = entries[i] | |
if (abs === '/') | |
e = abs + e | |
else | |
e = abs + '/' + e | |
this.cache[e] = true | |
} | |
} | |
this.cache[abs] = entries | |
return cb(null, entries) | |
} | |
Glob.prototype._readdirError = function (f, er, cb) { | |
if (this.aborted) | |
return | |
// handle errors, and cache the information | |
switch (er.code) { | |
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 | |
case 'ENOTDIR': // totally normal. means it *does* exist. | |
var abs = this._makeAbs(f) | |
this.cache[abs] = 'FILE' | |
if (abs === this.cwdAbs) { | |
var error = new Error(er.code + ' invalid cwd ' + this.cwd) | |
error.path = this.cwd | |
error.code = er.code | |
this.emit('error', error) | |
this.abort() | |
} | |
break | |
case 'ENOENT': // not terribly unusual | |
case 'ELOOP': | |
case 'ENAMETOOLONG': | |
case 'UNKNOWN': | |
this.cache[this._makeAbs(f)] = false | |
break | |
default: // some unusual error. Treat as failure. | |
this.cache[this._makeAbs(f)] = false | |
if (this.strict) { | |
this.emit('error', er) | |
// If the error is handled, then we abort | |
// if not, we threw out of here | |
this.abort() | |
} | |
if (!this.silent) | |
console.error('glob error', er) | |
break | |
} | |
return cb() | |
} | |
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { | |
var self = this | |
this._readdir(abs, inGlobStar, function (er, entries) { | |
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) | |
}) | |
} | |
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { | |
//console.error('pgs2', prefix, remain[0], entries) | |
// no entries means not a dir, so it can never have matches | |
// foo.txt/** doesn't match foo.txt | |
if (!entries) | |
return cb() | |
// test without the globstar, and with every child both below | |
// and replacing the globstar. | |
var remainWithoutGlobStar = remain.slice(1) | |
var gspref = prefix ? [ prefix ] : [] | |
var noGlobStar = gspref.concat(remainWithoutGlobStar) | |
// the noGlobStar pattern exits the inGlobStar state | |
this._process(noGlobStar, index, false, cb) | |
var isSym = this.symlinks[abs] | |
var len = entries.length | |
// If it's a symlink, and we're in a globstar, then stop | |
if (isSym && inGlobStar) | |
return cb() | |
for (var i = 0; i < len; i++) { | |
var e = entries[i] | |
if (e.charAt(0) === '.' && !this.dot) | |
continue | |
// these two cases enter the inGlobStar state | |
var instead = gspref.concat(entries[i], remainWithoutGlobStar) | |
this._process(instead, index, true, cb) | |
var below = gspref.concat(entries[i], remain) | |
this._process(below, index, true, cb) | |
} | |
cb() | |
} | |
Glob.prototype._processSimple = function (prefix, index, cb) { | |
// XXX review this. Shouldn't it be doing the mounting etc | |
// before doing stat? kinda weird? | |
var self = this | |
this._stat(prefix, function (er, exists) { | |
self._processSimple2(prefix, index, er, exists, cb) | |
}) | |
} | |
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { | |
//console.error('ps2', prefix, exists) | |
if (!this.matches[index]) | |
this.matches[index] = Object.create(null) | |
// If it doesn't exist, then just mark the lack of results | |
if (!exists) | |
return cb() | |
if (prefix && isAbsolute(prefix) && !this.nomount) { | |
var trail = /[\/\\]$/.test(prefix) | |
if (prefix.charAt(0) === '/') { | |
prefix = path.join(this.root, prefix) | |
} else { | |
prefix = path.resolve(this.root, prefix) | |
if (trail) | |
prefix += '/' | |
} | |
} | |
if (process.platform === 'win32') | |
prefix = prefix.replace(/\\/g, '/') | |
// Mark this as a match | |
this._emitMatch(index, prefix) | |
cb() | |
} | |
// Returns either 'DIR', 'FILE', or false | |
Glob.prototype._stat = function (f, cb) { | |
var abs = this._makeAbs(f) | |
var needDir = f.slice(-1) === '/' | |
if (f.length > this.maxLength) | |
return cb() | |
if (!this.stat && ownProp(this.cache, abs)) { | |
var c = this.cache[abs] | |
if (Array.isArray(c)) | |
c = 'DIR' | |
// It exists, but maybe not how we need it | |
if (!needDir || c === 'DIR') | |
return cb(null, c) | |
if (needDir && c === 'FILE') | |
return cb() | |
// otherwise we have to stat, because maybe c=true | |
// if we know it exists, but not what it is. | |
} | |
var exists | |
var stat = this.statCache[abs] | |
if (stat !== undefined) { | |
if (stat === false) | |
return cb(null, stat) | |
else { | |
var type = stat.isDirectory() ? 'DIR' : 'FILE' | |
if (needDir && type === 'FILE') | |
return cb() | |
else | |
return cb(null, type, stat) | |
} | |
} | |
var self = this | |
var statcb = inflight('stat\0' + abs, lstatcb_) | |
if (statcb) | |
fs.lstat(abs, statcb) | |
function lstatcb_ (er, lstat) { | |
if (lstat && lstat.isSymbolicLink()) { | |
// If it's a symlink, then treat it as the target, unless | |
// the target does not exist, then treat it as a file. | |
return fs.stat(abs, function (er, stat) { | |
if (er) | |
self._stat2(f, abs, null, lstat, cb) | |
else | |
self._stat2(f, abs, er, stat, cb) | |
}) | |
} else { | |
self._stat2(f, abs, er, lstat, cb) | |
} | |
} | |
} | |
Glob.prototype._stat2 = function (f, abs, er, stat, cb) { | |
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { | |
this.statCache[abs] = false | |
return cb() | |
} | |
var needDir = f.slice(-1) === '/' | |
this.statCache[abs] = stat | |
if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) | |
return cb(null, false, stat) | |
var c = true | |
if (stat) | |
c = stat.isDirectory() ? 'DIR' : 'FILE' | |
this.cache[abs] = this.cache[abs] || c | |
if (needDir && c === 'FILE') | |
return cb() | |
return cb(null, c, stat) | |
} | |
/***/ }), | |
/***/ 9010: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
module.exports = globSync | |
globSync.GlobSync = GlobSync | |
var fs = __nccwpck_require__(5747) | |
var rp = __nccwpck_require__(6863) | |
var minimatch = __nccwpck_require__(3973) | |
var Minimatch = minimatch.Minimatch | |
var Glob = __nccwpck_require__(1957).Glob | |
var util = __nccwpck_require__(1669) | |
var path = __nccwpck_require__(5622) | |
var assert = __nccwpck_require__(2357) | |
var isAbsolute = __nccwpck_require__(8714) | |
var common = __nccwpck_require__(7625) | |
var setopts = common.setopts | |
var ownProp = common.ownProp | |
var childrenIgnored = common.childrenIgnored | |
var isIgnored = common.isIgnored | |
function globSync (pattern, options) { | |
if (typeof options === 'function' || arguments.length === 3) | |
throw new TypeError('callback provided to sync glob\n'+ | |
'See: https://github.com/isaacs/node-glob/issues/167') | |
return new GlobSync(pattern, options).found | |
} | |
function GlobSync (pattern, options) { | |
if (!pattern) | |
throw new Error('must provide pattern') | |
if (typeof options === 'function' || arguments.length === 3) | |
throw new TypeError('callback provided to sync glob\n'+ | |
'See: https://github.com/isaacs/node-glob/issues/167') | |
if (!(this instanceof GlobSync)) | |
return new GlobSync(pattern, options) | |
setopts(this, pattern, options) | |
if (this.noprocess) | |
return this | |
var n = this.minimatch.set.length | |
this.matches = new Array(n) | |
for (var i = 0; i < n; i ++) { | |
this._process(this.minimatch.set[i], i, false) | |
} | |
this._finish() | |
} | |
GlobSync.prototype._finish = function () { | |
assert(this instanceof GlobSync) | |
if (this.realpath) { | |
var self = this | |
this.matches.forEach(function (matchset, index) { | |
var set = self.matches[index] = Object.create(null) | |
for (var p in matchset) { | |
try { | |
p = self._makeAbs(p) | |
var real = rp.realpathSync(p, self.realpathCache) | |
set[real] = true | |
} catch (er) { | |
if (er.syscall === 'stat') | |
set[self._makeAbs(p)] = true | |
else | |
throw er | |
} | |
} | |
}) | |
} | |
common.finish(this) | |
} | |
GlobSync.prototype._process = function (pattern, index, inGlobStar) { | |
assert(this instanceof GlobSync) | |
// Get the first [n] parts of pattern that are all strings. | |
var n = 0 | |
while (typeof pattern[n] === 'string') { | |
n ++ | |
} | |
// now n is the index of the first one that is *not* a string. | |
// See if there's anything else | |
var prefix | |
switch (n) { | |
// if not, then this is rather simple | |
case pattern.length: | |
this._processSimple(pattern.join('/'), index) | |
return | |
case 0: | |
// pattern *starts* with some non-trivial item. | |
// going to readdir(cwd), but not include the prefix in matches. | |
prefix = null | |
break | |
default: | |
// pattern has some string bits in the front. | |
// whatever it starts with, whether that's 'absolute' like /foo/bar, | |
// or 'relative' like '../baz' | |
prefix = pattern.slice(0, n).join('/') | |
break | |
} | |
var remain = pattern.slice(n) | |
// get the list of entries. | |
var read | |
if (prefix === null) | |
read = '.' | |
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { | |
if (!prefix || !isAbsolute(prefix)) | |
prefix = '/' + prefix | |
read = prefix | |
} else | |
read = prefix | |
var abs = this._makeAbs(read) | |
//if ignored, skip processing | |
if (childrenIgnored(this, read)) | |
return | |
var isGlobStar = remain[0] === minimatch.GLOBSTAR | |
if (isGlobStar) | |
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) | |
else | |
this._processReaddir(prefix, read, abs, remain, index, inGlobStar) | |
} | |
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { | |
var entries = this._readdir(abs, inGlobStar) | |
// if the abs isn't a dir, then nothing can match! | |
if (!entries) | |
return | |
// It will only match dot entries if it starts with a dot, or if | |
// dot is set. Stuff like @(.foo|.bar) isn't allowed. | |
var pn = remain[0] | |
var negate = !!this.minimatch.negate | |
var rawGlob = pn._glob | |
var dotOk = this.dot || rawGlob.charAt(0) === '.' | |
var matchedEntries = [] | |
for (var i = 0; i < entries.length; i++) { | |
var e = entries[i] | |
if (e.charAt(0) !== '.' || dotOk) { | |
var m | |
if (negate && !prefix) { | |
m = !e.match(pn) | |
} else { | |
m = e.match(pn) | |
} | |
if (m) | |
matchedEntries.push(e) | |
} | |
} | |
var len = matchedEntries.length | |
// If there are no matched entries, then nothing matches. | |
if (len === 0) | |
return | |
// if this is the last remaining pattern bit, then no need for | |
// an additional stat *unless* the user has specified mark or | |
// stat explicitly. We know they exist, since readdir returned | |
// them. | |
if (remain.length === 1 && !this.mark && !this.stat) { | |
if (!this.matches[index]) | |
this.matches[index] = Object.create(null) | |
for (var i = 0; i < len; i ++) { | |
var e = matchedEntries[i] | |
if (prefix) { | |
if (prefix.slice(-1) !== '/') | |
e = prefix + '/' + e | |
else | |
e = prefix + e | |
} | |
if (e.charAt(0) === '/' && !this.nomount) { | |
e = path.join(this.root, e) | |
} | |
this._emitMatch(index, e) | |
} | |
// This was the last one, and no stats were needed | |
return | |
} | |
// now test all matched entries as stand-ins for that part | |
// of the pattern. | |
remain.shift() | |
for (var i = 0; i < len; i ++) { | |
var e = matchedEntries[i] | |
var newPattern | |
if (prefix) | |
newPattern = [prefix, e] | |
else | |
newPattern = [e] | |
this._process(newPattern.concat(remain), index, inGlobStar) | |
} | |
} | |
GlobSync.prototype._emitMatch = function (index, e) { | |
if (isIgnored(this, e)) | |
return | |
var abs = this._makeAbs(e) | |
if (this.mark) | |
e = this._mark(e) | |
if (this.absolute) { | |
e = abs | |
} | |
if (this.matches[index][e]) | |
return | |
if (this.nodir) { | |
var c = this.cache[abs] | |
if (c === 'DIR' || Array.isArray(c)) | |
return | |
} | |
this.matches[index][e] = true | |
if (this.stat) | |
this._stat(e) | |
} | |
GlobSync.prototype._readdirInGlobStar = function (abs) { | |
// follow all symlinked directories forever | |
// just proceed as if this is a non-globstar situation | |
if (this.follow) | |
return this._readdir(abs, false) | |
var entries | |
var lstat | |
var stat | |
try { | |
lstat = fs.lstatSync(abs) | |
} catch (er) { | |
if (er.code === 'ENOENT') { | |
// lstat failed, doesn't exist | |
return null | |
} | |
} | |
var isSym = lstat && lstat.isSymbolicLink() | |
this.symlinks[abs] = isSym | |
// If it's not a symlink or a dir, then it's definitely a regular file. | |
// don't bother doing a readdir in that case. | |
if (!isSym && lstat && !lstat.isDirectory()) | |
this.cache[abs] = 'FILE' | |
else | |
entries = this._readdir(abs, false) | |
return entries | |
} | |
GlobSync.prototype._readdir = function (abs, inGlobStar) { | |
var entries | |
if (inGlobStar && !ownProp(this.symlinks, abs)) | |
return this._readdirInGlobStar(abs) | |
if (ownProp(this.cache, abs)) { | |
var c = this.cache[abs] | |
if (!c || c === 'FILE') | |
return null | |
if (Array.isArray(c)) | |
return c | |
} | |
try { | |
return this._readdirEntries(abs, fs.readdirSync(abs)) | |
} catch (er) { | |
this._readdirError(abs, er) | |
return null | |
} | |
} | |
GlobSync.prototype._readdirEntries = function (abs, entries) { | |
// if we haven't asked to stat everything, then just | |
// assume that everything in there exists, so we can avoid | |
// having to stat it a second time. | |
if (!this.mark && !this.stat) { | |
for (var i = 0; i < entries.length; i ++) { | |
var e = entries[i] | |
if (abs === '/') | |
e = abs + e | |
else | |
e = abs + '/' + e | |
this.cache[e] = true | |
} | |
} | |
this.cache[abs] = entries | |
// mark and cache dir-ness | |
return entries | |
} | |
GlobSync.prototype._readdirError = function (f, er) { | |
// handle errors, and cache the information | |
switch (er.code) { | |
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 | |
case 'ENOTDIR': // totally normal. means it *does* exist. | |
var abs = this._makeAbs(f) | |
this.cache[abs] = 'FILE' | |
if (abs === this.cwdAbs) { | |
var error = new Error(er.code + ' invalid cwd ' + this.cwd) | |
error.path = this.cwd | |
error.code = er.code | |
throw error | |
} | |
break | |
case 'ENOENT': // not terribly unusual | |
case 'ELOOP': | |
case 'ENAMETOOLONG': | |
case 'UNKNOWN': | |
this.cache[this._makeAbs(f)] = false | |
break | |
default: // some unusual error. Treat as failure. | |
this.cache[this._makeAbs(f)] = false | |
if (this.strict) | |
throw er | |
if (!this.silent) | |
console.error('glob error', er) | |
break | |
} | |
} | |
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { | |
var entries = this._readdir(abs, inGlobStar) | |
// no entries means not a dir, so it can never have matches | |
// foo.txt/** doesn't match foo.txt | |
if (!entries) | |
return | |
// test without the globstar, and with every child both below | |
// and replacing the globstar. | |
var remainWithoutGlobStar = remain.slice(1) | |
var gspref = prefix ? [ prefix ] : [] | |
var noGlobStar = gspref.concat(remainWithoutGlobStar) | |
// the noGlobStar pattern exits the inGlobStar state | |
this._process(noGlobStar, index, false) | |
var len = entries.length | |
var isSym = this.symlinks[abs] | |
// If it's a symlink, and we're in a globstar, then stop | |
if (isSym && inGlobStar) | |
return | |
for (var i = 0; i < len; i++) { | |
var e = entries[i] | |
if (e.charAt(0) === '.' && !this.dot) | |
continue | |
// these two cases enter the inGlobStar state | |
var instead = gspref.concat(entries[i], remainWithoutGlobStar) | |
this._process(instead, index, true) | |
var below = gspref.concat(entries[i], remain) | |
this._process(below, index, true) | |
} | |
} | |
GlobSync.prototype._processSimple = function (prefix, index) { | |
// XXX review this. Shouldn't it be doing the mounting etc | |
// before doing stat? kinda weird? | |
var exists = this._stat(prefix) | |
if (!this.matches[index]) | |
this.matches[index] = Object.create(null) | |
// If it doesn't exist, then just mark the lack of results | |
if (!exists) | |
return | |
if (prefix && isAbsolute(prefix) && !this.nomount) { | |
var trail = /[\/\\]$/.test(prefix) | |
if (prefix.charAt(0) === '/') { | |
prefix = path.join(this.root, prefix) | |
} else { | |
prefix = path.resolve(this.root, prefix) | |
if (trail) | |
prefix += '/' | |
} | |
} | |
if (process.platform === 'win32') | |
prefix = prefix.replace(/\\/g, '/') | |
// Mark this as a match | |
this._emitMatch(index, prefix) | |
} | |
// Returns either 'DIR', 'FILE', or false | |
GlobSync.prototype._stat = function (f) { | |
var abs = this._makeAbs(f) | |
var needDir = f.slice(-1) === '/' | |
if (f.length > this.maxLength) | |
return false | |
if (!this.stat && ownProp(this.cache, abs)) { | |
var c = this.cache[abs] | |
if (Array.isArray(c)) | |
c = 'DIR' | |
// It exists, but maybe not how we need it | |
if (!needDir || c === 'DIR') | |
return c | |
if (needDir && c === 'FILE') | |
return false | |
// otherwise we have to stat, because maybe c=true | |
// if we know it exists, but not what it is. | |
} | |
var exists | |
var stat = this.statCache[abs] | |
if (!stat) { | |
var lstat | |
try { | |
lstat = fs.lstatSync(abs) | |
} catch (er) { | |
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { | |
this.statCache[abs] = false | |
return false | |
} | |
} | |
if (lstat && lstat.isSymbolicLink()) { | |
try { | |
stat = fs.statSync(abs) | |
} catch (er) { | |
stat = lstat | |
} | |
} else { | |
stat = lstat | |
} | |
} | |
this.statCache[abs] = stat | |
var c = true | |
if (stat) | |
c = stat.isDirectory() ? 'DIR' : 'FILE' | |
this.cache[abs] = this.cache[abs] || c | |
if (needDir && c === 'FILE') | |
return false | |
return c | |
} | |
GlobSync.prototype._mark = function (p) { | |
return common.mark(this, p) | |
} | |
GlobSync.prototype._makeAbs = function (f) { | |
return common.makeAbs(this, f) | |
} | |
/***/ }), | |
/***/ 2492: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
var wrappy = __nccwpck_require__(2940) | |
var reqs = Object.create(null) | |
var once = __nccwpck_require__(1223) | |
module.exports = wrappy(inflight) | |
function inflight (key, cb) { | |
if (reqs[key]) { | |
reqs[key].push(cb) | |
return null | |
} else { | |
reqs[key] = [cb] | |
return makeres(key) | |
} | |
} | |
function makeres (key) { | |
return once(function RES () { | |
var cbs = reqs[key] | |
var len = cbs.length | |
var args = slice(arguments) | |
// XXX It's somewhat ambiguous whether a new callback added in this | |
// pass should be queued for later execution if something in the | |
// list of callbacks throws, or if it should just be discarded. | |
// However, it's such an edge case that it hardly matters, and either | |
// choice is likely as surprising as the other. | |
// As it happens, we do go ahead and schedule it for later execution. | |
try { | |
for (var i = 0; i < len; i++) { | |
cbs[i].apply(null, args) | |
} | |
} finally { | |
if (cbs.length > len) { | |
// added more in the interim. | |
// de-zalgo, just in case, but don't call again. | |
cbs.splice(0, len) | |
process.nextTick(function () { | |
RES.apply(null, args) | |
}) | |
} else { | |
delete reqs[key] | |
} | |
} | |
}) | |
} | |
function slice (args) { | |
var length = args.length | |
var array = [] | |
for (var i = 0; i < length; i++) array[i] = args[i] | |
return array | |
} | |
/***/ }), | |
/***/ 4124: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
try { | |
var util = __nccwpck_require__(1669); | |
/* istanbul ignore next */ | |
if (typeof util.inherits !== 'function') throw ''; | |
module.exports = util.inherits; | |
} catch (e) { | |
/* istanbul ignore next */ | |
module.exports = __nccwpck_require__(8544); | |
} | |
/***/ }), | |
/***/ 8544: | |
/***/ ((module) => { | |
if (typeof Object.create === 'function') { | |
// implementation from standard node.js 'util' module | |
module.exports = function inherits(ctor, superCtor) { | |
if (superCtor) { | |
ctor.super_ = superCtor | |
ctor.prototype = Object.create(superCtor.prototype, { | |
constructor: { | |
value: ctor, | |
enumerable: false, | |
writable: true, | |
configurable: true | |
} | |
}) | |
} | |
}; | |
} else { | |
// old school shim for old browsers | |
module.exports = function inherits(ctor, superCtor) { | |
if (superCtor) { | |
ctor.super_ = superCtor | |
var TempCtor = function () {} | |
TempCtor.prototype = superCtor.prototype | |
ctor.prototype = new TempCtor() | |
ctor.prototype.constructor = ctor | |
} | |
} | |
} | |
/***/ }), | |
/***/ 7129: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
"use strict"; | |
// A linked list to keep track of recently-used-ness | |
const Yallist = __nccwpck_require__(665) | |
const MAX = Symbol('max') | |
const LENGTH = Symbol('length') | |
const LENGTH_CALCULATOR = Symbol('lengthCalculator') | |
const ALLOW_STALE = Symbol('allowStale') | |
const MAX_AGE = Symbol('maxAge') | |
const DISPOSE = Symbol('dispose') | |
const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') | |
const LRU_LIST = Symbol('lruList') | |
const CACHE = Symbol('cache') | |
const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') | |
const naiveLength = () => 1 | |
// lruList is a yallist where the head is the youngest | |
// item, and the tail is the oldest. the list contains the Hit | |
// objects as the entries. | |
// Each Hit object has a reference to its Yallist.Node. This | |
// never changes. | |
// | |
// cache is a Map (or PseudoMap) that matches the keys to | |
// the Yallist.Node object. | |
class LRUCache { | |
constructor (options) { | |
if (typeof options === 'number') | |
options = { max: options } | |
if (!options) | |
options = {} | |
if (options.max && (typeof options.max !== 'number' || options.max < 0)) | |
throw new TypeError('max must be a non-negative number') | |
// Kind of weird to have a default max of Infinity, but oh well. | |
const max = this[MAX] = options.max || Infinity | |
const lc = options.length || naiveLength | |
this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc | |
this[ALLOW_STALE] = options.stale || false | |
if (options.maxAge && typeof options.maxAge !== 'number') | |
throw new TypeError('maxAge must be a number') | |
this[MAX_AGE] = options.maxAge || 0 | |
this[DISPOSE] = options.dispose | |
this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false | |
this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false | |
this.reset() | |
} | |
// resize the cache when the max changes. | |
set max (mL) { | |
if (typeof mL !== 'number' || mL < 0) | |
throw new TypeError('max must be a non-negative number') | |
this[MAX] = mL || Infinity | |
trim(this) | |
} | |
get max () { | |
return this[MAX] | |
} | |
set allowStale (allowStale) { | |
this[ALLOW_STALE] = !!allowStale | |
} | |
get allowStale () { | |
return this[ALLOW_STALE] | |
} | |
set maxAge (mA) { | |
if (typeof mA !== 'number') | |
throw new TypeError('maxAge must be a non-negative number') | |
this[MAX_AGE] = mA | |
trim(this) | |
} | |
get maxAge () { | |
return this[MAX_AGE] | |
} | |
// resize the cache when the lengthCalculator changes. | |
set lengthCalculator (lC) { | |
if (typeof lC !== 'function') | |
lC = naiveLength | |
if (lC !== this[LENGTH_CALCULATOR]) { | |
this[LENGTH_CALCULATOR] = lC | |
this[LENGTH] = 0 | |
this[LRU_LIST].forEach(hit => { | |
hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) | |
this[LENGTH] += hit.length | |
}) | |
} | |
trim(this) | |
} | |
get lengthCalculator () { return this[LENGTH_CALCULATOR] } | |
get length () { return this[LENGTH] } | |
get itemCount () { return this[LRU_LIST].length } | |
rforEach (fn, thisp) { | |
thisp = thisp || this | |
for (let walker = this[LRU_LIST].tail; walker !== null;) { | |
const prev = walker.prev | |
forEachStep(this, fn, walker, thisp) | |
walker = prev | |
} | |
} | |
forEach (fn, thisp) { | |
thisp = thisp || this | |
for (let walker = this[LRU_LIST].head; walker !== null;) { | |
const next = walker.next | |
forEachStep(this, fn, walker, thisp) | |
walker = next | |
} | |
} | |
keys () { | |
return this[LRU_LIST].toArray().map(k => k.key) | |
} | |
values () { | |
return this[LRU_LIST].toArray().map(k => k.value) | |
} | |
reset () { | |
if (this[DISPOSE] && | |
this[LRU_LIST] && | |
this[LRU_LIST].length) { | |
this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) | |
} | |
this[CACHE] = new Map() // hash of items by key | |
this[LRU_LIST] = new Yallist() // list of items in order of use recency | |
this[LENGTH] = 0 // length of items in the list | |
} | |
dump () { | |
return this[LRU_LIST].map(hit => | |
isStale(this, hit) ? false : { | |
k: hit.key, | |
v: hit.value, | |
e: hit.now + (hit.maxAge || 0) | |
}).toArray().filter(h => h) | |
} | |
dumpLru () { | |
return this[LRU_LIST] | |
} | |
set (key, value, maxAge) { | |
maxAge = maxAge || this[MAX_AGE] | |
if (maxAge && typeof maxAge !== 'number') | |
throw new TypeError('maxAge must be a number') | |
const now = maxAge ? Date.now() : 0 | |
const len = this[LENGTH_CALCULATOR](value, key) | |
if (this[CACHE].has(key)) { | |
if (len > this[MAX]) { | |
del(this, this[CACHE].get(key)) | |
return false | |
} | |
const node = this[CACHE].get(key) | |
const item = node.value | |
// dispose of the old one before overwriting | |
// split out into 2 ifs for better coverage tracking | |
if (this[DISPOSE]) { | |
if (!this[NO_DISPOSE_ON_SET]) | |
this[DISPOSE](key, item.value) | |
} | |
item.now = now | |
item.maxAge = maxAge | |
item.value = value | |
this[LENGTH] += len - item.length | |
item.length = len | |
this.get(key) | |
trim(this) | |
return true | |
} | |
const hit = new Entry(key, value, len, now, maxAge) | |
// oversized objects fall out of cache automatically. | |
if (hit.length > this[MAX]) { | |
if (this[DISPOSE]) | |
this[DISPOSE](key, value) | |
return false | |
} | |
this[LENGTH] += hit.length | |
this[LRU_LIST].unshift(hit) | |
this[CACHE].set(key, this[LRU_LIST].head) | |
trim(this) | |
return true | |
} | |
has (key) { | |
if (!this[CACHE].has(key)) return false | |
const hit = this[CACHE].get(key).value | |
return !isStale(this, hit) | |
} | |
get (key) { | |
return get(this, key, true) | |
} | |
peek (key) { | |
return get(this, key, false) | |
} | |
pop () { | |
const node = this[LRU_LIST].tail | |
if (!node) | |
return null | |
del(this, node) | |
return node.value | |
} | |
del (key) { | |
del(this, this[CACHE].get(key)) | |
} | |
load (arr) { | |
// reset the cache | |
this.reset() | |
const now = Date.now() | |
// A previous serialized cache has the most recent items first | |
for (let l = arr.length - 1; l >= 0; l--) { | |
const hit = arr[l] | |
const expiresAt = hit.e || 0 | |
if (expiresAt === 0) | |
// the item was created without expiration in a non aged cache | |
this.set(hit.k, hit.v) | |
else { | |
const maxAge = expiresAt - now | |
// dont add already expired items | |
if (maxAge > 0) { | |
this.set(hit.k, hit.v, maxAge) | |
} | |
} | |
} | |
} | |
prune () { | |
this[CACHE].forEach((value, key) => get(this, key, false)) | |
} | |
} | |
const get = (self, key, doUse) => { | |
const node = self[CACHE].get(key) | |
if (node) { | |
const hit = node.value | |
if (isStale(self, hit)) { | |
del(self, node) | |
if (!self[ALLOW_STALE]) | |
return undefined | |
} else { | |
if (doUse) { | |
if (self[UPDATE_AGE_ON_GET]) | |
node.value.now = Date.now() | |
self[LRU_LIST].unshiftNode(node) | |
} | |
} | |
return hit.value | |
} | |
} | |
const isStale = (self, hit) => { | |
if (!hit || (!hit.maxAge && !self[MAX_AGE])) | |
return false | |
const diff = Date.now() - hit.now | |
return hit.maxAge ? diff > hit.maxAge | |
: self[MAX_AGE] && (diff > self[MAX_AGE]) | |
} | |
const trim = self => { | |
if (self[LENGTH] > self[MAX]) { | |
for (let walker = self[LRU_LIST].tail; | |
self[LENGTH] > self[MAX] && walker !== null;) { | |
// We know that we're about to delete this one, and also | |
// what the next least recently used key will be, so just | |
// go ahead and set it now. | |
const prev = walker.prev | |
del(self, walker) | |
walker = prev | |
} | |
} | |
} | |
const del = (self, node) => { | |
if (node) { | |
const hit = node.value | |
if (self[DISPOSE]) | |
self[DISPOSE](hit.key, hit.value) | |
self[LENGTH] -= hit.length | |
self[CACHE].delete(hit.key) | |
self[LRU_LIST].removeNode(node) | |
} | |
} | |
class Entry { | |
constructor (key, value, length, now, maxAge) { | |
this.key = key | |
this.value = value | |
this.length = length | |
this.now = now | |
this.maxAge = maxAge || 0 | |
} | |
} | |
const forEachStep = (self, fn, node, thisp) => { | |
let hit = node.value | |
if (isStale(self, hit)) { | |
del(self, node) | |
if (!self[ALLOW_STALE]) | |
hit = undefined | |
} | |
if (hit) | |
fn.call(thisp, hit.value, hit.key, self) | |
} | |
module.exports = LRUCache | |
/***/ }), | |
/***/ 3973: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
module.exports = minimatch | |
minimatch.Minimatch = Minimatch | |
var path = { sep: '/' } | |
try { | |
path = __nccwpck_require__(5622) | |
} catch (er) {} | |
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} | |
var expand = __nccwpck_require__(3717) | |
var plTypes = { | |
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, | |
'?': { open: '(?:', close: ')?' }, | |
'+': { open: '(?:', close: ')+' }, | |
'*': { open: '(?:', close: ')*' }, | |
'@': { open: '(?:', close: ')' } | |
} | |
// any single thing other than / | |
// don't need to escape / when using new RegExp() | |
var qmark = '[^/]' | |
// * => any number of characters | |
var star = qmark + '*?' | |
// ** when dots are allowed. Anything goes, except .. and . | |
// not (^ or / followed by one or two dots followed by $ or /), | |
// followed by anything, any number of times. | |
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' | |
// not a ^ or / followed by a dot, | |
// followed by anything, any number of times. | |
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' | |
// characters that need to be escaped in RegExp. | |
var reSpecials = charSet('().*{}+?[]^$\\!') | |
// "abc" -> { a:true, b:true, c:true } | |
function charSet (s) { | |
return s.split('').reduce(function (set, c) { | |
set[c] = true | |
return set | |
}, {}) | |
} | |
// normalizes slashes. | |
var slashSplit = /\/+/ | |
minimatch.filter = filter | |
function filter (pattern, options) { | |
options = options || {} | |
return function (p, i, list) { | |
return minimatch(p, pattern, options) | |
} | |
} | |
function ext (a, b) { | |
a = a || {} | |
b = b || {} | |
var t = {} | |
Object.keys(b).forEach(function (k) { | |
t[k] = b[k] | |
}) | |
Object.keys(a).forEach(function (k) { | |
t[k] = a[k] | |
}) | |
return t | |
} | |
minimatch.defaults = function (def) { | |
if (!def || !Object.keys(def).length) return minimatch | |
var orig = minimatch | |
var m = function minimatch (p, pattern, options) { | |
return orig.minimatch(p, pattern, ext(def, options)) | |
} | |
m.Minimatch = function Minimatch (pattern, options) { | |
return new orig.Minimatch(pattern, ext(def, options)) | |
} | |
return m | |
} | |
Minimatch.defaults = function (def) { | |
if (!def || !Object.keys(def).length) return Minimatch | |
return minimatch.defaults(def).Minimatch | |
} | |
function minimatch (p, pattern, options) { | |
if (typeof pattern !== 'string') { | |
throw new TypeError('glob pattern string required') | |
} | |
if (!options) options = {} | |
// shortcut: comments match nothing. | |
if (!options.nocomment && pattern.charAt(0) === '#') { | |
return false | |
} | |
// "" only matches "" | |
if (pattern.trim() === '') return p === '' | |
return new Minimatch(pattern, options).match(p) | |
} | |
function Minimatch (pattern, options) { | |
if (!(this instanceof Minimatch)) { | |
return new Minimatch(pattern, options) | |
} | |
if (typeof pattern !== 'string') { | |
throw new TypeError('glob pattern string required') | |
} | |
if (!options) options = {} | |
pattern = pattern.trim() | |
// windows support: need to use /, not \ | |
if (path.sep !== '/') { | |
pattern = pattern.split(path.sep).join('/') | |
} | |
this.options = options | |
this.set = [] | |
this.pattern = pattern | |
this.regexp = null | |
this.negate = false | |
this.comment = false | |
this.empty = false | |
// make the set of regexps etc. | |
this.make() | |
} | |
Minimatch.prototype.debug = function () {} | |
Minimatch.prototype.make = make | |
function make () { | |
// don't do it more than once. | |
if (this._made) return | |
var pattern = this.pattern | |
var options = this.options | |
// empty patterns and comments match nothing. | |
if (!options.nocomment && pattern.charAt(0) === '#') { | |
this.comment = true | |
return | |
} | |
if (!pattern) { | |
this.empty = true | |
return | |
} | |
// step 1: figure out negation, etc. | |
this.parseNegate() | |
// step 2: expand braces | |
var set = this.globSet = this.braceExpand() | |
if (options.debug) this.debug = console.error | |
this.debug(this.pattern, set) | |
// step 3: now we have a set, so turn each one into a series of path-portion | |
// matching patterns. | |
// These will be regexps, except in the case of "**", which is | |
// set to the GLOBSTAR object for globstar behavior, | |
// and will not contain any / characters | |
set = this.globParts = set.map(function (s) { | |
return s.split(slashSplit) | |
}) | |
this.debug(this.pattern, set) | |
// glob --> regexps | |
set = set.map(function (s, si, set) { | |
return s.map(this.parse, this) | |
}, this) | |
this.debug(this.pattern, set) | |
// filter out everything that didn't compile properly. | |
set = set.filter(function (s) { | |
return s.indexOf(false) === -1 | |
}) | |
this.debug(this.pattern, set) | |
this.set = set | |
} | |
Minimatch.prototype.parseNegate = parseNegate | |
function parseNegate () { | |
var pattern = this.pattern | |
var negate = false | |
var options = this.options | |
var negateOffset = 0 | |
if (options.nonegate) return | |
for (var i = 0, l = pattern.length | |
; i < l && pattern.charAt(i) === '!' | |
; i++) { | |
negate = !negate | |
negateOffset++ | |
} | |
if (negateOffset) this.pattern = pattern.substr(negateOffset) | |
this.negate = negate | |
} | |
// Brace expansion: | |
// a{b,c}d -> abd acd | |
// a{b,}c -> abc ac | |
// a{0..3}d -> a0d a1d a2d a3d | |
// a{b,c{d,e}f}g -> abg acdfg acefg | |
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg | |
// | |
// Invalid sets are not expanded. | |
// a{2..}b -> a{2..}b | |
// a{b}c -> a{b}c | |
minimatch.braceExpand = function (pattern, options) { | |
return braceExpand(pattern, options) | |
} | |
Minimatch.prototype.braceExpand = braceExpand | |
function braceExpand (pattern, options) { | |
if (!options) { | |
if (this instanceof Minimatch) { | |
options = this.options | |
} else { | |
options = {} | |
} | |
} | |
pattern = typeof pattern === 'undefined' | |
? this.pattern : pattern | |
if (typeof pattern === 'undefined') { | |
throw new TypeError('undefined pattern') | |
} | |
if (options.nobrace || | |
!pattern.match(/\{.*\}/)) { | |
// shortcut. no need to expand. | |
return [pattern] | |
} | |
return expand(pattern) | |
} | |
// parse a component of the expanded set. | |
// At this point, no pattern may contain "/" in it | |
// so we're going to return a 2d array, where each entry is the full | |
// pattern, split on '/', and then turned into a regular expression. | |
// A regexp is made at the end which joins each array with an | |
// escaped /, and another full one which joins each regexp with |. | |
// | |
// Following the lead of Bash 4.1, note that "**" only has special meaning | |
// when it is the *only* thing in a path portion. Otherwise, any series | |
// of * is equivalent to a single *. Globstar behavior is enabled by | |
// default, and can be disabled by setting options.noglobstar. | |
Minimatch.prototype.parse = parse | |
var SUBPARSE = {} | |
function parse (pattern, isSub) { | |
if (pattern.length > 1024 * 64) { | |
throw new TypeError('pattern is too long') | |
} | |
var options = this.options | |
// shortcuts | |
if (!options.noglobstar && pattern === '**') return GLOBSTAR | |
if (pattern === '') return '' | |
var re = '' | |
var hasMagic = !!options.nocase | |
var escaping = false | |
// ? => one single character | |
var patternListStack = [] | |
var negativeLists = [] | |
var stateChar | |
var inClass = false | |
var reClassStart = -1 | |
var classStart = -1 | |
// . and .. never match anything that doesn't start with ., | |
// even when options.dot is set. | |
var patternStart = pattern.charAt(0) === '.' ? '' // anything | |
// not (start or / followed by . or .. followed by / or end) | |
: options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' | |
: '(?!\\.)' | |
var self = this | |
function clearStateChar () { | |
if (stateChar) { | |
// we had some state-tracking character | |
// that wasn't consumed by this pass. | |
switch (stateChar) { | |
case '*': | |
re += star | |
hasMagic = true | |
break | |
case '?': | |
re += qmark | |
hasMagic = true | |
break | |
default: | |
re += '\\' + stateChar | |
break | |
} | |
self.debug('clearStateChar %j %j', stateChar, re) | |
stateChar = false | |
} | |
} | |
for (var i = 0, len = pattern.length, c | |
; (i < len) && (c = pattern.charAt(i)) | |
; i++) { | |
this.debug('%s\t%s %s %j', pattern, i, re, c) | |
// skip over any that are escaped. | |
if (escaping && reSpecials[c]) { | |
re += '\\' + c | |
escaping = false | |
continue | |
} | |
switch (c) { | |
case '/': | |
// completely not allowed, even escaped. | |
// Should already be path-split by now. | |
return false | |
case '\\': | |
clearStateChar() | |
escaping = true | |
continue | |
// the various stateChar values | |
// for the "extglob" stuff. | |
case '?': | |
case '*': | |
case '+': | |
case '@': | |
case '!': | |
this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) | |
// all of those are literals inside a class, except that | |
// the glob [!a] means [^a] in regexp | |
if (inClass) { | |
this.debug(' in class') | |
if (c === '!' && i === classStart + 1) c = '^' | |
re += c | |
continue | |
} | |
// if we already have a stateChar, then it means | |
// that there was something like ** or +? in there. | |
// Handle the stateChar, then proceed with this one. | |
self.debug('call clearStateChar %j', stateChar) | |
clearStateChar() | |
stateChar = c | |
// if extglob is disabled, then +(asdf|foo) isn't a thing. | |
// just clear the statechar *now*, rather than even diving into | |
// the patternList stuff. | |
if (options.noext) clearStateChar() | |
continue | |
case '(': | |
if (inClass) { | |
re += '(' | |
continue | |
} | |
if (!stateChar) { | |
re += '\\(' | |
continue | |
} | |
patternListStack.push({ | |
type: stateChar, | |
start: i - 1, | |
reStart: re.length, | |
open: plTypes[stateChar].open, | |
close: plTypes[stateChar].close | |
}) | |
// negation is (?:(?!js)[^/]*) | |
re += stateChar === '!' ? '(?:(?!(?:' : '(?:' | |
this.debug('plType %j %j', stateChar, re) | |
stateChar = false | |
continue | |
case ')': | |
if (inClass || !patternListStack.length) { | |
re += '\\)' | |
continue | |
} | |
clearStateChar() | |
hasMagic = true | |
var pl = patternListStack.pop() | |
// negation is (?:(?!js)[^/]*) | |
// The others are (?:<pattern>)<type> | |
re += pl.close | |
if (pl.type === '!') { | |
negativeLists.push(pl) | |
} | |
pl.reEnd = re.length | |
continue | |
case '|': | |
if (inClass || !patternListStack.length || escaping) { | |
re += '\\|' | |
escaping = false | |
continue | |
} | |
clearStateChar() | |
re += '|' | |
continue | |
// these are mostly the same in regexp and glob | |
case '[': | |
// swallow any state-tracking char before the [ | |
clearStateChar() | |
if (inClass) { | |
re += '\\' + c | |
continue | |
} | |
inClass = true | |
classStart = i | |
reClassStart = re.length | |
re += c | |
continue | |
case ']': | |
// a right bracket shall lose its special | |
// meaning and represent itself in | |
// a bracket expression if it occurs | |
// first in the list. -- POSIX.2 2.8.3.2 | |
if (i === classStart + 1 || !inClass) { | |
re += '\\' + c | |
escaping = false | |
continue | |
} | |
// handle the case where we left a class open. | |
// "[z-a]" is valid, equivalent to "\[z-a\]" | |
if (inClass) { | |
// split where the last [ was, make sure we don't have | |
// an invalid re. if so, re-walk the contents of the | |
// would-be class to re-translate any characters that | |
// were passed through as-is | |
// TODO: It would probably be faster to determine this | |
// without a try/catch and a new RegExp, but it's tricky | |
// to do safely. For now, this is safe and works. | |
var cs = pattern.substring(classStart + 1, i) | |
try { | |
RegExp('[' + cs + ']') | |
} catch (er) { | |
// not a valid class! | |
var sp = this.parse(cs, SUBPARSE) | |
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' | |
hasMagic = hasMagic || sp[1] | |
inClass = false | |
continue | |
} | |
} | |
// finish up the class. | |
hasMagic = true | |
inClass = false | |
re += c | |
continue | |
default: | |
// swallow any state char that wasn't consumed | |
clearStateChar() | |
if (escaping) { | |
// no need | |
escaping = false | |
} else if (reSpecials[c] | |
&& !(c === '^' && inClass)) { | |
re += '\\' | |
} | |
re += c | |
} // switch | |
} // for | |
// handle the case where we left a class open. | |
// "[abc" is valid, equivalent to "\[abc" | |
if (inClass) { | |
// split where the last [ was, and escape it | |
// this is a huge pita. We now have to re-walk | |
// the contents of the would-be class to re-translate | |
// any characters that were passed through as-is | |
cs = pattern.substr(classStart + 1) | |
sp = this.parse(cs, SUBPARSE) | |
re = re.substr(0, reClassStart) + '\\[' + sp[0] | |
hasMagic = hasMagic || sp[1] | |
} | |
// handle the case where we had a +( thing at the *end* | |
// of the pattern. | |
// each pattern list stack adds 3 chars, and we need to go through | |
// and escape any | chars that were passed through as-is for the regexp. | |
// Go through and escape them, taking care not to double-escape any | |
// | chars that were already escaped. | |
for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { | |
var tail = re.slice(pl.reStart + pl.open.length) | |
this.debug('setting tail', re, pl) | |
// maybe some even number of \, then maybe 1 \, followed by a | | |
tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { | |
if (!$2) { | |
// the | isn't already escaped, so escape it. | |
$2 = '\\' | |
} | |
// need to escape all those slashes *again*, without escaping the | |
// one that we need for escaping the | character. As it works out, | |
// escaping an even number of slashes can be done by simply repeating | |
// it exactly after itself. That's why this trick works. | |
// | |
// I am sorry that you have to see this. | |
return $1 + $1 + $2 + '|' | |
}) | |
this.debug('tail=%j\n %s', tail, tail, pl, re) | |
var t = pl.type === '*' ? star | |
: pl.type === '?' ? qmark | |
: '\\' + pl.type | |
hasMagic = true | |
re = re.slice(0, pl.reStart) + t + '\\(' + tail | |
} | |
// handle trailing things that only matter at the very end. | |
clearStateChar() | |
if (escaping) { | |
// trailing \\ | |
re += '\\\\' | |
} | |
// only need to apply the nodot start if the re starts with | |
// something that could conceivably capture a dot | |
var addPatternStart = false | |
switch (re.charAt(0)) { | |
case '.': | |
case '[': | |
case '(': addPatternStart = true | |
} | |
// Hack to work around lack of negative lookbehind in JS | |
// A pattern like: *.!(x).!(y|z) needs to ensure that a name | |
// like 'a.xyz.yz' doesn't match. So, the first negative | |
// lookahead, has to look ALL the way ahead, to the end of | |
// the pattern. | |
for (var n = negativeLists.length - 1; n > -1; n--) { | |
var nl = negativeLists[n] | |
var nlBefore = re.slice(0, nl.reStart) | |
var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) | |
var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) | |
var nlAfter = re.slice(nl.reEnd) | |
nlLast += nlAfter | |
// Handle nested stuff like *(*.js|!(*.json)), where open parens | |
// mean that we should *not* include the ) in the bit that is considered | |
// "after" the negated section. | |
var openParensBefore = nlBefore.split('(').length - 1 | |
var cleanAfter = nlAfter | |
for (i = 0; i < openParensBefore; i++) { | |
cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') | |
} | |
nlAfter = cleanAfter | |
var dollar = '' | |
if (nlAfter === '' && isSub !== SUBPARSE) { | |
dollar = '$' | |
} | |
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast | |
re = newRe | |
} | |
// if the re is not "" at this point, then we need to make sure | |
// it doesn't match against an empty path part. | |
// Otherwise a/* will match a/, which it should not. | |
if (re !== '' && hasMagic) { | |
re = '(?=.)' + re | |
} | |
if (addPatternStart) { | |
re = patternStart + re | |
} | |
// parsing just a piece of a larger pattern. | |
if (isSub === SUBPARSE) { | |
return [re, hasMagic] | |
} | |
// skip the regexp for non-magical patterns | |
// unescape anything in it, though, so that it'll be | |
// an exact match against a file etc. | |
if (!hasMagic) { | |
return globUnescape(pattern) | |
} | |
var flags = options.nocase ? 'i' : '' | |
try { | |
var regExp = new RegExp('^' + re + '$', flags) | |
} catch (er) { | |
// If it was an invalid regular expression, then it can't match | |
// anything. This trick looks for a character after the end of | |
// the string, which is of course impossible, except in multi-line | |
// mode, but it's not a /m regex. | |
return new RegExp('$.') | |
} | |
regExp._glob = pattern | |
regExp._src = re | |
return regExp | |
} | |
minimatch.makeRe = function (pattern, options) { | |
return new Minimatch(pattern, options || {}).makeRe() | |
} | |
Minimatch.prototype.makeRe = makeRe | |
function makeRe () { | |
if (this.regexp || this.regexp === false) return this.regexp | |
// at this point, this.set is a 2d array of partial | |
// pattern strings, or "**". | |
// | |
// It's better to use .match(). This function shouldn't | |
// be used, really, but it's pretty convenient sometimes, | |
// when you just want to work with a regex. | |
var set = this.set | |
if (!set.length) { | |
this.regexp = false | |
return this.regexp | |
} | |
var options = this.options | |
var twoStar = options.noglobstar ? star | |
: options.dot ? twoStarDot | |
: twoStarNoDot | |
var flags = options.nocase ? 'i' : '' | |
var re = set.map(function (pattern) { | |
return pattern.map(function (p) { | |
return (p === GLOBSTAR) ? twoStar | |
: (typeof p === 'string') ? regExpEscape(p) | |
: p._src | |
}).join('\\\/') | |
}).join('|') | |
// must match entire pattern | |
// ending in a * or ** will make it less strict. | |
re = '^(?:' + re + ')$' | |
// can match anything, as long as it's not this. | |
if (this.negate) re = '^(?!' + re + ').*$' | |
try { | |
this.regexp = new RegExp(re, flags) | |
} catch (ex) { | |
this.regexp = false | |
} | |
return this.regexp | |
} | |
minimatch.match = function (list, pattern, options) { | |
options = options || {} | |
var mm = new Minimatch(pattern, options) | |
list = list.filter(function (f) { | |
return mm.match(f) | |
}) | |
if (mm.options.nonull && !list.length) { | |
list.push(pattern) | |
} | |
return list | |
} | |
Minimatch.prototype.match = match | |
function match (f, partial) { | |
this.debug('match', f, this.pattern) | |
// short-circuit in the case of busted things. | |
// comments, etc. | |
if (this.comment) return false | |
if (this.empty) return f === '' | |
if (f === '/' && partial) return true | |
var options = this.options | |
// windows: need to use /, not \ | |
if (path.sep !== '/') { | |
f = f.split(path.sep).join('/') | |
} | |
// treat the test path as a set of pathparts. | |
f = f.split(slashSplit) | |
this.debug(this.pattern, 'split', f) | |
// just ONE of the pattern sets in this.set needs to match | |
// in order for it to be valid. If negating, then just one | |
// match means that we have failed. | |
// Either way, return on the first hit. | |
var set = this.set | |
this.debug(this.pattern, 'set', set) | |
// Find the basename of the path by looking for the last non-empty segment | |
var filename | |
var i | |
for (i = f.length - 1; i >= 0; i--) { | |
filename = f[i] | |
if (filename) break | |
} | |
for (i = 0; i < set.length; i++) { | |
var pattern = set[i] | |
var file = f | |
if (options.matchBase && pattern.length === 1) { | |
file = [filename] | |
} | |
var hit = this.matchOne(file, pattern, partial) | |
if (hit) { | |
if (options.flipNegate) return true | |
return !this.negate | |
} | |
} | |
// didn't get any hits. this is success if it's a negative | |
// pattern, failure otherwise. | |
if (options.flipNegate) return false | |
return this.negate | |
} | |
// set partial to true to test if, for example, | |
// "/a/b" matches the start of "/*/b/*/d" | |
// Partial means, if you run out of file before you run | |
// out of pattern, then that's fine, as long as all | |
// the parts match. | |
Minimatch.prototype.matchOne = function (file, pattern, partial) { | |
var options = this.options | |
this.debug('matchOne', | |
{ 'this': this, file: file, pattern: pattern }) | |
this.debug('matchOne', file.length, pattern.length) | |
for (var fi = 0, | |
pi = 0, | |
fl = file.length, | |
pl = pattern.length | |
; (fi < fl) && (pi < pl) | |
; fi++, pi++) { | |
this.debug('matchOne loop') | |
var p = pattern[pi] | |
var f = file[fi] | |
this.debug(pattern, p, f) | |
// should be impossible. | |
// some invalid regexp stuff in the set. | |
if (p === false) return false | |
if (p === GLOBSTAR) { | |
this.debug('GLOBSTAR', [pattern, p, f]) | |
// "**" | |
// a/**/b/**/c would match the following: | |
// a/b/x/y/z/c | |
// a/x/y/z/b/c | |
// a/b/x/b/x/c | |
// a/b/c | |
// To do this, take the rest of the pattern after | |
// the **, and see if it would match the file remainder. | |
// If so, return success. | |
// If not, the ** "swallows" a segment, and try again. | |
// This is recursively awful. | |
// | |
// a/**/b/**/c matching a/b/x/y/z/c | |
// - a matches a | |
// - doublestar | |
// - matchOne(b/x/y/z/c, b/**/c) | |
// - b matches b | |
// - doublestar | |
// - matchOne(x/y/z/c, c) -> no | |
// - matchOne(y/z/c, c) -> no | |
// - matchOne(z/c, c) -> no | |
// - matchOne(c, c) yes, hit | |
var fr = fi | |
var pr = pi + 1 | |
if (pr === pl) { | |
this.debug('** at the end') | |
// a ** at the end will just swallow the rest. | |
// We have found a match. | |
// however, it will not swallow /.x, unless | |
// options.dot is set. | |
// . and .. are *never* matched by **, for explosively | |
// exponential reasons. | |
for (; fi < fl; fi++) { | |
if (file[fi] === '.' || file[fi] === '..' || | |
(!options.dot && file[fi].charAt(0) === '.')) return false | |
} | |
return true | |
} | |
// ok, let's see if we can swallow whatever we can. | |
while (fr < fl) { | |
var swallowee = file[fr] | |
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) | |
// XXX remove this slice. Just pass the start index. | |
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { | |
this.debug('globstar found match!', fr, fl, swallowee) | |
// found a match. | |
return true | |
} else { | |
// can't swallow "." or ".." ever. | |
// can only swallow ".foo" when explicitly asked. | |
if (swallowee === '.' || swallowee === '..' || | |
(!options.dot && swallowee.charAt(0) === '.')) { | |
this.debug('dot detected!', file, fr, pattern, pr) | |
break | |
} | |
// ** swallows a segment, and continue. | |
this.debug('globstar swallow a segment, and continue') | |
fr++ | |
} | |
} | |
// no match was found. | |
// However, in partial mode, we can't say this is necessarily over. | |
// If there's more *pattern* left, then | |
if (partial) { | |
// ran out of file | |
this.debug('\n>>> no match, partial?', file, fr, pattern, pr) | |
if (fr === fl) return true | |
} | |
return false | |
} | |
// something other than ** | |
// non-magic patterns just have to match exactly | |
// patterns with magic have been turned into regexps. | |
var hit | |
if (typeof p === 'string') { | |
if (options.nocase) { | |
hit = f.toLowerCase() === p.toLowerCase() | |
} else { | |
hit = f === p | |
} | |
this.debug('string match', p, f, hit) | |
} else { | |
hit = f.match(p) | |
this.debug('pattern match', p, f, hit) | |
} | |
if (!hit) return false | |
} | |
// Note: ending in / means that we'll get a final "" | |
// at the end of the pattern. This can only match a | |
// corresponding "" at the end of the file. | |
// If the file ends in /, then it can only match a | |
// a pattern that ends in /, unless the pattern just | |
// doesn't have any more for it. But, a/b/ should *not* | |
// match "a/b/*", even though "" matches against the | |
// [^/]*? pattern, except in partial mode, where it might | |
// simply not be reached yet. | |
// However, a/b/ should still satisfy a/* | |
// now either we fell off the end of the pattern, or we're done. | |
if (fi === fl && pi === pl) { | |
// ran out of pattern and filename at the same time. | |
// an exact hit! | |
return true | |
} else if (fi === fl) { | |
// ran out of file, but still had pattern left. | |
// this is ok if we're doing the match as part of | |
// a glob fs traversal. | |
return partial | |
} else if (pi === pl) { | |
// ran out of pattern, still have file left. | |
// this is only acceptable if we're on the very last | |
// empty segment of a file with a trailing slash. | |
// a/* should match a/b/ | |
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') | |
return emptyFileEnd | |
} | |
// should be unreachable. | |
throw new Error('wtf?') | |
} | |
// replace stuff like \* with * | |
function globUnescape (s) { | |
return s.replace(/\\(.)/g, '$1') | |
} | |
function regExpEscape (s) { | |
return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') | |
} | |
/***/ }), | |
/***/ 467: | |
/***/ ((module, exports, __nccwpck_require__) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } | |
var Stream = _interopDefault(__nccwpck_require__(2413)); | |
var http = _interopDefault(__nccwpck_require__(8605)); | |
var Url = _interopDefault(__nccwpck_require__(8835)); | |
var https = _interopDefault(__nccwpck_require__(7211)); | |
var zlib = _interopDefault(__nccwpck_require__(8761)); | |
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js | |
// fix for "Readable" isn't a named export issue | |
const Readable = Stream.Readable; | |
const BUFFER = Symbol('buffer'); | |
const TYPE = Symbol('type'); | |
class Blob { | |
constructor() { | |
this[TYPE] = ''; | |
const blobParts = arguments[0]; | |
const options = arguments[1]; | |
const buffers = []; | |
let size = 0; | |
if (blobParts) { | |
const a = blobParts; | |
const length = Number(a.length); | |
for (let i = 0; i < length; i++) { | |
const element = a[i]; | |
let buffer; | |
if (element instanceof Buffer) { | |
buffer = element; | |
} else if (ArrayBuffer.isView(element)) { | |
buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); | |
} else if (element instanceof ArrayBuffer) { | |
buffer = Buffer.from(element); | |
} else if (element instanceof Blob) { | |
buffer = element[BUFFER]; | |
} else { | |
buffer = Buffer.from(typeof element === 'string' ? element : String(element)); | |
} | |
size += buffer.length; | |
buffers.push(buffer); | |
} | |
} | |
this[BUFFER] = Buffer.concat(buffers); | |
let type = options && options.type !== undefined && String(options.type).toLowerCase(); | |
if (type && !/[^\u0020-\u007E]/.test(type)) { | |
this[TYPE] = type; | |
} | |
} | |
get size() { | |
return this[BUFFER].length; | |
} | |
get type() { | |
return this[TYPE]; | |
} | |
text() { | |
return Promise.resolve(this[BUFFER].toString()); | |
} | |
arrayBuffer() { | |
const buf = this[BUFFER]; | |
const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); | |
return Promise.resolve(ab); | |
} | |
stream() { | |
const readable = new Readable(); | |
readable._read = function () {}; | |
readable.push(this[BUFFER]); | |
readable.push(null); | |
return readable; | |
} | |
toString() { | |
return '[object Blob]'; | |
} | |
slice() { | |
const size = this.size; | |
const start = arguments[0]; | |
const end = arguments[1]; | |
let relativeStart, relativeEnd; | |
if (start === undefined) { | |
relativeStart = 0; | |
} else if (start < 0) { | |
relativeStart = Math.max(size + start, 0); | |
} else { | |
relativeStart = Math.min(start, size); | |
} | |
if (end === undefined) { | |
relativeEnd = size; | |
} else if (end < 0) { | |
relativeEnd = Math.max(size + end, 0); | |
} else { | |
relativeEnd = Math.min(end, size); | |
} | |
const span = Math.max(relativeEnd - relativeStart, 0); | |
const buffer = this[BUFFER]; | |
const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); | |
const blob = new Blob([], { type: arguments[2] }); | |
blob[BUFFER] = slicedBuffer; | |
return blob; | |
} | |
} | |
Object.defineProperties(Blob.prototype, { | |
size: { enumerable: true }, | |
type: { enumerable: true }, | |
slice: { enumerable: true } | |
}); | |
Object.defineProperty(Blob.prototype, Symbol.toStringTag, { | |
value: 'Blob', | |
writable: false, | |
enumerable: false, | |
configurable: true | |
}); | |
/** | |
* fetch-error.js | |
* | |
* FetchError interface for operational errors | |
*/ | |
/** | |
* Create FetchError instance | |
* | |
* @param String message Error message for human | |
* @param String type Error type for machine | |
* @param String systemError For Node.js system error | |
* @return FetchError | |
*/ | |
function FetchError(message, type, systemError) { | |
Error.call(this, message); | |
this.message = message; | |
this.type = type; | |
// when err.type is `system`, err.code contains system error code | |
if (systemError) { | |
this.code = this.errno = systemError.code; | |
} | |
// hide custom error implementation details from end-users | |
Error.captureStackTrace(this, this.constructor); | |
} | |
FetchError.prototype = Object.create(Error.prototype); | |
FetchError.prototype.constructor = FetchError; | |
FetchError.prototype.name = 'FetchError'; | |
let convert; | |
try { | |
convert = __nccwpck_require__(2877).convert; | |
} catch (e) {} | |
const INTERNALS = Symbol('Body internals'); | |
// fix an issue where "PassThrough" isn't a named export for node <10 | |
const PassThrough = Stream.PassThrough; | |
/** | |
* Body mixin | |
* | |
* Ref: https://fetch.spec.whatwg.org/#body | |
* | |
* @param Stream body Readable stream | |
* @param Object opts Response options | |
* @return Void | |
*/ | |
function Body(body) { | |
var _this = this; | |
var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, | |
_ref$size = _ref.size; | |
let size = _ref$size === undefined ? 0 : _ref$size; | |
var _ref$timeout = _ref.timeout; | |
let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; | |
if (body == null) { | |
// body is undefined or null | |
body = null; | |
} else if (isURLSearchParams(body)) { | |
// body is a URLSearchParams | |
body = Buffer.from(body.toString()); | |
} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { | |
// body is ArrayBuffer | |
body = Buffer.from(body); | |
} else if (ArrayBuffer.isView(body)) { | |
// body is ArrayBufferView | |
body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); | |
} else if (body instanceof Stream) ; else { | |
// none of the above | |
// coerce to string then buffer | |
body = Buffer.from(String(body)); | |
} | |
this[INTERNALS] = { | |
body, | |
disturbed: false, | |
error: null | |
}; | |
this.size = size; | |
this.timeout = timeout; | |
if (body instanceof Stream) { | |
body.on('error', function (err) { | |
const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); | |
_this[INTERNALS].error = error; | |
}); | |
} | |
} | |
Body.prototype = { | |
get body() { | |
return this[INTERNALS].body; | |
}, | |
get bodyUsed() { | |
return this[INTERNALS].disturbed; | |
}, | |
/** | |
* Decode response as ArrayBuffer | |
* | |
* @return Promise | |
*/ | |
arrayBuffer() { | |
return consumeBody.call(this).then(function (buf) { | |
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); | |
}); | |
}, | |
/** | |
* Return raw response as Blob | |
* | |
* @return Promise | |
*/ | |
blob() { | |
let ct = this.headers && this.headers.get('content-type') || ''; | |
return consumeBody.call(this).then(function (buf) { | |
return Object.assign( | |
// Prevent copying | |
new Blob([], { | |
type: ct.toLowerCase() | |
}), { | |
[BUFFER]: buf | |
}); | |
}); | |
}, | |
/** | |
* Decode response as json | |
* | |
* @return Promise | |
*/ | |
json() { | |
var _this2 = this; | |
return consumeBody.call(this).then(function (buffer) { | |
try { | |
return JSON.parse(buffer.toString()); | |
} catch (err) { | |
return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); | |
} | |
}); | |
}, | |
/** | |
* Decode response as text | |
* | |
* @return Promise | |
*/ | |
text() { | |
return consumeBody.call(this).then(function (buffer) { | |
return buffer.toString(); | |
}); | |
}, | |
/** | |
* Decode response as buffer (non-spec api) | |
* | |
* @return Promise | |
*/ | |
buffer() { | |
return consumeBody.call(this); | |
}, | |
/** | |
* Decode response as text, while automatically detecting the encoding and | |
* trying to decode to UTF-8 (non-spec api) | |
* | |
* @return Promise | |
*/ | |
textConverted() { | |
var _this3 = this; | |
return consumeBody.call(this).then(function (buffer) { | |
return convertBody(buffer, _this3.headers); | |
}); | |
} | |
}; | |
// In browsers, all properties are enumerable. | |
Object.defineProperties(Body.prototype, { | |
body: { enumerable: true }, | |
bodyUsed: { enumerable: true }, | |
arrayBuffer: { enumerable: true }, | |
blob: { enumerable: true }, | |
json: { enumerable: true }, | |
text: { enumerable: true } | |
}); | |
Body.mixIn = function (proto) { | |
for (const name of Object.getOwnPropertyNames(Body.prototype)) { | |
// istanbul ignore else: future proof | |
if (!(name in proto)) { | |
const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); | |
Object.defineProperty(proto, name, desc); | |
} | |
} | |
}; | |
/** | |
* Consume and convert an entire Body to a Buffer. | |
* | |
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body | |
* | |
* @return Promise | |
*/ | |
function consumeBody() { | |
var _this4 = this; | |
if (this[INTERNALS].disturbed) { | |
return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); | |
} | |
this[INTERNALS].disturbed = true; | |
if (this[INTERNALS].error) { | |
return Body.Promise.reject(this[INTERNALS].error); | |
} | |
let body = this.body; | |
// body is null | |
if (body === null) { | |
return Body.Promise.resolve(Buffer.alloc(0)); | |
} | |
// body is blob | |
if (isBlob(body)) { | |
body = body.stream(); | |
} | |
// body is buffer | |
if (Buffer.isBuffer(body)) { | |
return Body.Promise.resolve(body); | |
} | |
// istanbul ignore if: should never happen | |
if (!(body instanceof Stream)) { | |
return Body.Promise.resolve(Buffer.alloc(0)); | |
} | |
// body is stream | |
// get ready to actually consume the body | |
let accum = []; | |
let accumBytes = 0; | |
let abort = false; | |
return new Body.Promise(function (resolve, reject) { | |
let resTimeout; | |
// allow timeout on slow response body | |
if (_this4.timeout) { | |
resTimeout = setTimeout(function () { | |
abort = true; | |
reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); | |
}, _this4.timeout); | |
} | |
// handle stream errors | |
body.on('error', function (err) { | |
if (err.name === 'AbortError') { | |
// if the request was aborted, reject with this Error | |
abort = true; | |
reject(err); | |
} else { | |
// other errors, such as incorrect content-encoding | |
reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); | |
} | |
}); | |
body.on('data', function (chunk) { | |
if (abort || chunk === null) { | |
return; | |
} | |
if (_this4.size && accumBytes + chunk.length > _this4.size) { | |
abort = true; | |
reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); | |
return; | |
} | |
accumBytes += chunk.length; | |
accum.push(chunk); | |
}); | |
body.on('end', function () { | |
if (abort) { | |
return; | |
} | |
clearTimeout(resTimeout); | |
try { | |
resolve(Buffer.concat(accum, accumBytes)); | |
} catch (err) { | |
// handle streams that have accumulated too much data (issue #414) | |
reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); | |
} | |
}); | |
}); | |
} | |
/** | |
* Detect buffer encoding and convert to target encoding | |
* ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding | |
* | |
* @param Buffer buffer Incoming buffer | |
* @param String encoding Target encoding | |
* @return String | |
*/ | |
function convertBody(buffer, headers) { | |
if (typeof convert !== 'function') { | |
throw new Error('The package `encoding` must be installed to use the textConverted() function'); | |
} | |
const ct = headers.get('content-type'); | |
let charset = 'utf-8'; | |
let res, str; | |
// header | |
if (ct) { | |
res = /charset=([^;]*)/i.exec(ct); | |
} | |
// no charset in content type, peek at response body for at most 1024 bytes | |
str = buffer.slice(0, 1024).toString(); | |
// html5 | |
if (!res && str) { | |
res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str); | |
} | |
// html4 | |
if (!res && str) { | |
res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str); | |
if (!res) { | |
res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str); | |
if (res) { | |
res.pop(); // drop last quote | |
} | |
} | |
if (res) { | |
res = /charset=(.*)/i.exec(res.pop()); | |
} | |
} | |
// xml | |
if (!res && str) { | |
res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); | |
} | |
// found charset | |
if (res) { | |
charset = res.pop(); | |
// prevent decode issues when sites use incorrect encoding | |
// ref: https://hsivonen.fi/encoding-menu/ | |
if (charset === 'gb2312' || charset === 'gbk') { | |
charset = 'gb18030'; | |
} | |
} | |
// turn raw buffers into a single utf-8 buffer | |
return convert(buffer, 'UTF-8', charset).toString(); | |
} | |
/** | |
* Detect a URLSearchParams object | |
* ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 | |
* | |
* @param Object obj Object to detect by type or brand | |
* @return String | |
*/ | |
function isURLSearchParams(obj) { | |
// Duck-typing as a necessary condition. | |
if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { | |
return false; | |
} | |
// Brand-checking and more duck-typing as optional condition. | |
return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; | |
} | |
/** | |
* Check if `obj` is a W3C `Blob` object (which `File` inherits from) | |
* @param {*} obj | |
* @return {boolean} | |
*/ | |
function isBlob(obj) { | |
return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); | |
} | |
/** | |
* Clone body given Res/Req instance | |
* | |
* @param Mixed instance Response or Request instance | |
* @return Mixed | |
*/ | |
function clone(instance) { | |
let p1, p2; | |
let body = instance.body; | |
// don't allow cloning a used body | |
if (instance.bodyUsed) { | |
throw new Error('cannot clone body after it is used'); | |
} | |
// check that body is a stream and not form-data object | |
// note: we can't clone the form-data object without having it as a dependency | |
if (body instanceof Stream && typeof body.getBoundary !== 'function') { | |
// tee instance body | |
p1 = new PassThrough(); | |
p2 = new PassThrough(); | |
body.pipe(p1); | |
body.pipe(p2); | |
// set instance body to teed body and return the other teed body | |
instance[INTERNALS].body = p1; | |
body = p2; | |
} | |
return body; | |
} | |
/** | |
* Performs the operation "extract a `Content-Type` value from |object|" as | |
* specified in the specification: | |
* https://fetch.spec.whatwg.org/#concept-bodyinit-extract | |
* | |
* This function assumes that instance.body is present. | |
* | |
* @param Mixed instance Any options.body input | |
*/ | |
function extractContentType(body) { | |
if (body === null) { | |
// body is null | |
return null; | |
} else if (typeof body === 'string') { | |
// body is string | |
return 'text/plain;charset=UTF-8'; | |
} else if (isURLSearchParams(body)) { | |
// body is a URLSearchParams | |
return 'application/x-www-form-urlencoded;charset=UTF-8'; | |
} else if (isBlob(body)) { | |
// body is blob | |
return body.type || null; | |
} else if (Buffer.isBuffer(body)) { | |
// body is buffer | |
return null; | |
} else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { | |
// body is ArrayBuffer | |
return null; | |
} else if (ArrayBuffer.isView(body)) { | |
// body is ArrayBufferView | |
return null; | |
} else if (typeof body.getBoundary === 'function') { | |
// detect form data input from form-data module | |
return `multipart/form-data;boundary=${body.getBoundary()}`; | |
} else if (body instanceof Stream) { | |
// body is stream | |
// can't really do much about this | |
return null; | |
} else { | |
// Body constructor defaults other things to string | |
return 'text/plain;charset=UTF-8'; | |
} | |
} | |
/** | |
* The Fetch Standard treats this as if "total bytes" is a property on the body. | |
* For us, we have to explicitly get it with a function. | |
* | |
* ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes | |
* | |
* @param Body instance Instance of Body | |
* @return Number? Number of bytes, or null if not possible | |
*/ | |
function getTotalBytes(instance) { | |
const body = instance.body; | |
if (body === null) { | |
// body is null | |
return 0; | |
} else if (isBlob(body)) { | |
return body.size; | |
} else if (Buffer.isBuffer(body)) { | |
// body is buffer | |
return body.length; | |
} else if (body && typeof body.getLengthSync === 'function') { | |
// detect form data input from form-data module | |
if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x | |
body.hasKnownLength && body.hasKnownLength()) { | |
// 2.x | |
return body.getLengthSync(); | |
} | |
return null; | |
} else { | |
// body is stream | |
return null; | |
} | |
} | |
/** | |
* Write a Body to a Node.js WritableStream (e.g. http.Request) object. | |
* | |
* @param Body instance Instance of Body | |
* @return Void | |
*/ | |
function writeToStream(dest, instance) { | |
const body = instance.body; | |
if (body === null) { | |
// body is null | |
dest.end(); | |
} else if (isBlob(body)) { | |
body.stream().pipe(dest); | |
} else if (Buffer.isBuffer(body)) { | |
// body is buffer | |
dest.write(body); | |
dest.end(); | |
} else { | |
// body is stream | |
body.pipe(dest); | |
} | |
} | |
// expose Promise | |
Body.Promise = global.Promise; | |
/** | |
* headers.js | |
* | |
* Headers class offers convenient helpers | |
*/ | |
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; | |
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; | |
function validateName(name) { | |
name = `${name}`; | |
if (invalidTokenRegex.test(name) || name === '') { | |
throw new TypeError(`${name} is not a legal HTTP header name`); | |
} | |
} | |
function validateValue(value) { | |
value = `${value}`; | |
if (invalidHeaderCharRegex.test(value)) { | |
throw new TypeError(`${value} is not a legal HTTP header value`); | |
} | |
} | |
/** | |
* Find the key in the map object given a header name. | |
* | |
* Returns undefined if not found. | |
* | |
* @param String name Header name | |
* @return String|Undefined | |
*/ | |
function find(map, name) { | |
name = name.toLowerCase(); | |
for (const key in map) { | |
if (key.toLowerCase() === name) { | |
return key; | |
} | |
} | |
return undefined; | |
} | |
const MAP = Symbol('map'); | |
class Headers { | |
/** | |
* Headers class | |
* | |
* @param Object headers Response headers | |
* @return Void | |
*/ | |
constructor() { | |
let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; | |
this[MAP] = Object.create(null); | |
if (init instanceof Headers) { | |
const rawHeaders = init.raw(); | |
const headerNames = Object.keys(rawHeaders); | |
for (const headerName of headerNames) { | |
for (const value of rawHeaders[headerName]) { | |
this.append(headerName, value); | |
} | |
} | |
return; | |
} | |
// We don't worry about converting prop to ByteString here as append() | |
// will handle it. | |
if (init == null) ; else if (typeof init === 'object') { | |
const method = init[Symbol.iterator]; | |
if (method != null) { | |
if (typeof method !== 'function') { | |
throw new TypeError('Header pairs must be iterable'); | |
} | |
// sequence<sequence<ByteString>> | |
// Note: per spec we have to first exhaust the lists then process them | |
const pairs = []; | |
for (const pair of init) { | |
if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { | |
throw new TypeError('Each header pair must be iterable'); | |
} | |
pairs.push(Array.from(pair)); | |
} | |
for (const pair of pairs) { | |
if (pair.length !== 2) { | |
throw new TypeError('Each header pair must be a name/value tuple'); | |
} | |
this.append(pair[0], pair[1]); | |
} | |
} else { | |
// record<ByteString, ByteString> | |
for (const key of Object.keys(init)) { | |
const value = init[key]; | |
this.append(key, value); | |
} | |
} | |
} else { | |
throw new TypeError('Provided initializer must be an object'); | |
} | |
} | |
/** | |
* Return combined header value given name | |
* | |
* @param String name Header name | |
* @return Mixed | |
*/ | |
get(name) { | |
name = `${name}`; | |
validateName(name); | |
const key = find(this[MAP], name); | |
if (key === undefined) { | |
return null; | |
} | |
return this[MAP][key].join(', '); | |
} | |
/** | |
* Iterate over all headers | |
* | |
* @param Function callback Executed for each item with parameters (value, name, thisArg) | |
* @param Boolean thisArg `this` context for callback function | |
* @return Void | |
*/ | |
forEach(callback) { | |
let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; | |
let pairs = getHeaders(this); | |
let i = 0; | |
while (i < pairs.length) { | |
var _pairs$i = pairs[i]; | |
const name = _pairs$i[0], | |
value = _pairs$i[1]; | |
callback.call(thisArg, value, name, this); | |
pairs = getHeaders(this); | |
i++; | |
} | |
} | |
/** | |
* Overwrite header values given name | |
* | |
* @param String name Header name | |
* @param String value Header value | |
* @return Void | |
*/ | |
set(name, value) { | |
name = `${name}`; | |
value = `${value}`; | |
validateName(name); | |
validateValue(value); | |
const key = find(this[MAP], name); | |
this[MAP][key !== undefined ? key : name] = [value]; | |
} | |
/** | |
* Append a value onto existing header | |
* | |
* @param String name Header name | |
* @param String value Header value | |
* @return Void | |
*/ | |
append(name, value) { | |
name = `${name}`; | |
value = `${value}`; | |
validateName(name); | |
validateValue(value); | |
const key = find(this[MAP], name); | |
if (key !== undefined) { | |
this[MAP][key].push(value); | |
} else { | |
this[MAP][name] = [value]; | |
} | |
} | |
/** | |
* Check for header name existence | |
* | |
* @param String name Header name | |
* @return Boolean | |
*/ | |
has(name) { | |
name = `${name}`; | |
validateName(name); | |
return find(this[MAP], name) !== undefined; | |
} | |
/** | |
* Delete all header values given name | |
* | |
* @param String name Header name | |
* @return Void | |
*/ | |
delete(name) { | |
name = `${name}`; | |
validateName(name); | |
const key = find(this[MAP], name); | |
if (key !== undefined) { | |
delete this[MAP][key]; | |
} | |
} | |
/** | |
* Return raw headers (non-spec api) | |
* | |
* @return Object | |
*/ | |
raw() { | |
return this[MAP]; | |
} | |
/** | |
* Get an iterator on keys. | |
* | |
* @return Iterator | |
*/ | |
keys() { | |
return createHeadersIterator(this, 'key'); | |
} | |
/** | |
* Get an iterator on values. | |
* | |
* @return Iterator | |
*/ | |
values() { | |
return createHeadersIterator(this, 'value'); | |
} | |
/** | |
* Get an iterator on entries. | |
* | |
* This is the default iterator of the Headers object. | |
* | |
* @return Iterator | |
*/ | |
[Symbol.iterator]() { | |
return createHeadersIterator(this, 'key+value'); | |
} | |
} | |
Headers.prototype.entries = Headers.prototype[Symbol.iterator]; | |
Object.defineProperty(Headers.prototype, Symbol.toStringTag, { | |
value: 'Headers', | |
writable: false, | |
enumerable: false, | |
configurable: true | |
}); | |
Object.defineProperties(Headers.prototype, { | |
get: { enumerable: true }, | |
forEach: { enumerable: true }, | |
set: { enumerable: true }, | |
append: { enumerable: true }, | |
has: { enumerable: true }, | |
delete: { enumerable: true }, | |
keys: { enumerable: true }, | |
values: { enumerable: true }, | |
entries: { enumerable: true } | |
}); | |
function getHeaders(headers) { | |
let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; | |
const keys = Object.keys(headers[MAP]).sort(); | |
return keys.map(kind === 'key' ? function (k) { | |
return k.toLowerCase(); | |
} : kind === 'value' ? function (k) { | |
return headers[MAP][k].join(', '); | |
} : function (k) { | |
return [k.toLowerCase(), headers[MAP][k].join(', ')]; | |
}); | |
} | |
const INTERNAL = Symbol('internal'); | |
function createHeadersIterator(target, kind) { | |
const iterator = Object.create(HeadersIteratorPrototype); | |
iterator[INTERNAL] = { | |
target, | |
kind, | |
index: 0 | |
}; | |
return iterator; | |
} | |
const HeadersIteratorPrototype = Object.setPrototypeOf({ | |
next() { | |
// istanbul ignore if | |
if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { | |
throw new TypeError('Value of `this` is not a HeadersIterator'); | |
} | |
var _INTERNAL = this[INTERNAL]; | |
const target = _INTERNAL.target, | |
kind = _INTERNAL.kind, | |
index = _INTERNAL.index; | |
const values = getHeaders(target, kind); | |
const len = values.length; | |
if (index >= len) { | |
return { | |
value: undefined, | |
done: true | |
}; | |
} | |
this[INTERNAL].index = index + 1; | |
return { | |
value: values[index], | |
done: false | |
}; | |
} | |
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); | |
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { | |
value: 'HeadersIterator', | |
writable: false, | |
enumerable: false, | |
configurable: true | |
}); | |
/** | |
* Export the Headers object in a form that Node.js can consume. | |
* | |
* @param Headers headers | |
* @return Object | |
*/ | |
function exportNodeCompatibleHeaders(headers) { | |
const obj = Object.assign({ __proto__: null }, headers[MAP]); | |
// http.request() only supports string as Host header. This hack makes | |
// specifying custom Host header possible. | |
const hostHeaderKey = find(headers[MAP], 'Host'); | |
if (hostHeaderKey !== undefined) { | |
obj[hostHeaderKey] = obj[hostHeaderKey][0]; | |
} | |
return obj; | |
} | |
/** | |
* Create a Headers object from an object of headers, ignoring those that do | |
* not conform to HTTP grammar productions. | |
* | |
* @param Object obj Object of headers | |
* @return Headers | |
*/ | |
function createHeadersLenient(obj) { | |
const headers = new Headers(); | |
for (const name of Object.keys(obj)) { | |
if (invalidTokenRegex.test(name)) { | |
continue; | |
} | |
if (Array.isArray(obj[name])) { | |
for (const val of obj[name]) { | |
if (invalidHeaderCharRegex.test(val)) { | |
continue; | |
} | |
if (headers[MAP][name] === undefined) { | |
headers[MAP][name] = [val]; | |
} else { | |
headers[MAP][name].push(val); | |
} | |
} | |
} else if (!invalidHeaderCharRegex.test(obj[name])) { | |
headers[MAP][name] = [obj[name]]; | |
} | |
} | |
return headers; | |
} | |
const INTERNALS$1 = Symbol('Response internals'); | |
// fix an issue where "STATUS_CODES" aren't a named export for node <10 | |
const STATUS_CODES = http.STATUS_CODES; | |
/** | |
* Response class | |
* | |
* @param Stream body Readable stream | |
* @param Object opts Response options | |
* @return Void | |
*/ | |
class Response { | |
constructor() { | |
let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; | |
let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; | |
Body.call(this, body, opts); | |
const status = opts.status || 200; | |
const headers = new Headers(opts.headers); | |
if (body != null && !headers.has('Content-Type')) { | |
const contentType = extractContentType(body); | |
if (contentType) { | |
headers.append('Content-Type', contentType); | |
} | |
} | |
this[INTERNALS$1] = { | |
url: opts.url, | |
status, | |
statusText: opts.statusText || STATUS_CODES[status], | |
headers, | |
counter: opts.counter | |
}; | |
} | |
get url() { | |
return this[INTERNALS$1].url || ''; | |
} | |
get status() { | |
return this[INTERNALS$1].status; | |
} | |
/** | |
* Convenience property representing if the request ended normally | |
*/ | |
get ok() { | |
return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; | |
} | |
get redirected() { | |
return this[INTERNALS$1].counter > 0; | |
} | |
get statusText() { | |
return this[INTERNALS$1].statusText; | |
} | |
get headers() { | |
return this[INTERNALS$1].headers; | |
} | |
/** | |
* Clone this response | |
* | |
* @return Response | |
*/ | |
clone() { | |
return new Response(clone(this), { | |
url: this.url, | |
status: this.status, | |
statusText: this.statusText, | |
headers: this.headers, | |
ok: this.ok, | |
redirected: this.redirected | |
}); | |
} | |
} | |
Body.mixIn(Response.prototype); | |
Object.defineProperties(Response.prototype, { | |
url: { enumerable: true }, | |
status: { enumerable: true }, | |
ok: { enumerable: true }, | |
redirected: { enumerable: true }, | |
statusText: { enumerable: true }, | |
headers: { enumerable: true }, | |
clone: { enumerable: true } | |
}); | |
Object.defineProperty(Response.prototype, Symbol.toStringTag, { | |
value: 'Response', | |
writable: false, | |
enumerable: false, | |
configurable: true | |
}); | |
const INTERNALS$2 = Symbol('Request internals'); | |
// fix an issue where "format", "parse" aren't a named export for node <10 | |
const parse_url = Url.parse; | |
const format_url = Url.format; | |
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; | |
/** | |
* Check if a value is an instance of Request. | |
* | |
* @param Mixed input | |
* @return Boolean | |
*/ | |
function isRequest(input) { | |
return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; | |
} | |
function isAbortSignal(signal) { | |
const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); | |
return !!(proto && proto.constructor.name === 'AbortSignal'); | |
} | |
/** | |
* Request class | |
* | |
* @param Mixed input Url or Request instance | |
* @param Object init Custom options | |
* @return Void | |
*/ | |
class Request { | |
constructor(input) { | |
let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; | |
let parsedURL; | |
// normalize input | |
if (!isRequest(input)) { | |
if (input && input.href) { | |
// in order to support Node.js' Url objects; though WHATWG's URL objects | |
// will fall into this branch also (since their `toString()` will return | |
// `href` property anyway) | |
parsedURL = parse_url(input.href); | |
} else { | |
// coerce input to a string before attempting to parse | |
parsedURL = parse_url(`${input}`); | |
} | |
input = {}; | |
} else { | |
parsedURL = parse_url(input.url); | |
} | |
let method = init.method || input.method || 'GET'; | |
method = method.toUpperCase(); | |
if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { | |
throw new TypeError('Request with GET/HEAD method cannot have body'); | |
} | |
let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; | |
Body.call(this, inputBody, { | |
timeout: init.timeout || input.timeout || 0, | |
size: init.size || input.size || 0 | |
}); | |
const headers = new Headers(init.headers || input.headers || {}); | |
if (inputBody != null && !headers.has('Content-Type')) { | |
const contentType = extractContentType(inputBody); | |
if (contentType) { | |
headers.append('Content-Type', contentType); | |
} | |
} | |
let signal = isRequest(input) ? input.signal : null; | |
if ('signal' in init) signal = init.signal; | |
if (signal != null && !isAbortSignal(signal)) { | |
throw new TypeError('Expected signal to be an instanceof AbortSignal'); | |
} | |
this[INTERNALS$2] = { | |
method, | |
redirect: init.redirect || input.redirect || 'follow', | |
headers, | |
parsedURL, | |
signal | |
}; | |
// node-fetch-only options | |
this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; | |
this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; | |
this.counter = init.counter || input.counter || 0; | |
this.agent = init.agent || input.agent; | |
} | |
get method() { | |
return this[INTERNALS$2].method; | |
} | |
get url() { | |
return format_url(this[INTERNALS$2].parsedURL); | |
} | |
get headers() { | |
return this[INTERNALS$2].headers; | |
} | |
get redirect() { | |
return this[INTERNALS$2].redirect; | |
} | |
get signal() { | |
return this[INTERNALS$2].signal; | |
} | |
/** | |
* Clone this request | |
* | |
* @return Request | |
*/ | |
clone() { | |
return new Request(this); | |
} | |
} | |
Body.mixIn(Request.prototype); | |
Object.defineProperty(Request.prototype, Symbol.toStringTag, { | |
value: 'Request', | |
writable: false, | |
enumerable: false, | |
configurable: true | |
}); | |
Object.defineProperties(Request.prototype, { | |
method: { enumerable: true }, | |
url: { enumerable: true }, | |
headers: { enumerable: true }, | |
redirect: { enumerable: true }, | |
clone: { enumerable: true }, | |
signal: { enumerable: true } | |
}); | |
/** | |
* Convert a Request to Node.js http request options. | |
* | |
* @param Request A Request instance | |
* @return Object The options object to be passed to http.request | |
*/ | |
function getNodeRequestOptions(request) { | |
const parsedURL = request[INTERNALS$2].parsedURL; | |
const headers = new Headers(request[INTERNALS$2].headers); | |
// fetch step 1.3 | |
if (!headers.has('Accept')) { | |
headers.set('Accept', '*/*'); | |
} | |
// Basic fetch | |
if (!parsedURL.protocol || !parsedURL.hostname) { | |
throw new TypeError('Only absolute URLs are supported'); | |
} | |
if (!/^https?:$/.test(parsedURL.protocol)) { | |
throw new TypeError('Only HTTP(S) protocols are supported'); | |
} | |
if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { | |
throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); | |
} | |
// HTTP-network-or-cache fetch steps 2.4-2.7 | |
let contentLengthValue = null; | |
if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { | |
contentLengthValue = '0'; | |
} | |
if (request.body != null) { | |
const totalBytes = getTotalBytes(request); | |
if (typeof totalBytes === 'number') { | |
contentLengthValue = String(totalBytes); | |
} | |
} | |
if (contentLengthValue) { | |
headers.set('Content-Length', contentLengthValue); | |
} | |
// HTTP-network-or-cache fetch step 2.11 | |
if (!headers.has('User-Agent')) { | |
headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); | |
} | |
// HTTP-network-or-cache fetch step 2.15 | |
if (request.compress && !headers.has('Accept-Encoding')) { | |
headers.set('Accept-Encoding', 'gzip,deflate'); | |
} | |
let agent = request.agent; | |
if (typeof agent === 'function') { | |
agent = agent(parsedURL); | |
} | |
if (!headers.has('Connection') && !agent) { | |
headers.set('Connection', 'close'); | |
} | |
// HTTP-network fetch step 4.2 | |
// chunked encoding is handled by Node.js | |
return Object.assign({}, parsedURL, { | |
method: request.method, | |
headers: exportNodeCompatibleHeaders(headers), | |
agent | |
}); | |
} | |
/** | |
* abort-error.js | |
* | |
* AbortError interface for cancelled requests | |
*/ | |
/** | |
* Create AbortError instance | |
* | |
* @param String message Error message for human | |
* @return AbortError | |
*/ | |
function AbortError(message) { | |
Error.call(this, message); | |
this.type = 'aborted'; | |
this.message = message; | |
// hide custom error implementation details from end-users | |
Error.captureStackTrace(this, this.constructor); | |
} | |
AbortError.prototype = Object.create(Error.prototype); | |
AbortError.prototype.constructor = AbortError; | |
AbortError.prototype.name = 'AbortError'; | |
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 | |
const PassThrough$1 = Stream.PassThrough; | |
const resolve_url = Url.resolve; | |
/** | |
* Fetch function | |
* | |
* @param Mixed url Absolute url or Request instance | |
* @param Object opts Fetch options | |
* @return Promise | |
*/ | |
function fetch(url, opts) { | |
// allow custom promise | |
if (!fetch.Promise) { | |
throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); | |
} | |
Body.Promise = fetch.Promise; | |
// wrap http.request into fetch | |
return new fetch.Promise(function (resolve, reject) { | |
// build request object | |
const request = new Request(url, opts); | |
const options = getNodeRequestOptions(request); | |
const send = (options.protocol === 'https:' ? https : http).request; | |
const signal = request.signal; | |
let response = null; | |
const abort = function abort() { | |
let error = new AbortError('The user aborted a request.'); | |
reject(error); | |
if (request.body && request.body instanceof Stream.Readable) { | |
request.body.destroy(error); | |
} | |
if (!response || !response.body) return; | |
response.body.emit('error', error); | |
}; | |
if (signal && signal.aborted) { | |
abort(); | |
return; | |
} | |
const abortAndFinalize = function abortAndFinalize() { | |
abort(); | |
finalize(); | |
}; | |
// send request | |
const req = send(options); | |
let reqTimeout; | |
if (signal) { | |
signal.addEventListener('abort', abortAndFinalize); | |
} | |
function finalize() { | |
req.abort(); | |
if (signal) signal.removeEventListener('abort', abortAndFinalize); | |
clearTimeout(reqTimeout); | |
} | |
if (request.timeout) { | |
req.once('socket', function (socket) { | |
reqTimeout = setTimeout(function () { | |
reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); | |
finalize(); | |
}, request.timeout); | |
}); | |
} | |
req.on('error', function (err) { | |
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); | |
finalize(); | |
}); | |
req.on('response', function (res) { | |
clearTimeout(reqTimeout); | |
const headers = createHeadersLenient(res.headers); | |
// HTTP fetch step 5 | |
if (fetch.isRedirect(res.statusCode)) { | |
// HTTP fetch step 5.2 | |
const location = headers.get('Location'); | |
// HTTP fetch step 5.3 | |
const locationURL = location === null ? null : resolve_url(request.url, location); | |
// HTTP fetch step 5.5 | |
switch (request.redirect) { | |
case 'error': | |
reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); | |
finalize(); | |
return; | |
case 'manual': | |
// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. | |
if (locationURL !== null) { | |
// handle corrupted header | |
try { | |
headers.set('Location', locationURL); | |
} catch (err) { | |
// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request | |
reject(err); | |
} | |
} | |
break; | |
case 'follow': | |
// HTTP-redirect fetch step 2 | |
if (locationURL === null) { | |
break; | |
} | |
// HTTP-redirect fetch step 5 | |
if (request.counter >= request.follow) { | |
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); | |
finalize(); | |
return; | |
} | |
// HTTP-redirect fetch step 6 (counter increment) | |
// Create a new Request object. | |
const requestOpts = { | |
headers: new Headers(request.headers), | |
follow: request.follow, | |
counter: request.counter + 1, | |
agent: request.agent, | |
compress: request.compress, | |
method: request.method, | |
body: request.body, | |
signal: request.signal, | |
timeout: request.timeout, | |
size: request.size | |
}; | |
// HTTP-redirect fetch step 9 | |
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { | |
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); | |
finalize(); | |
return; | |
} | |
// HTTP-redirect fetch step 11 | |
if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { | |
requestOpts.method = 'GET'; | |
requestOpts.body = undefined; | |
requestOpts.headers.delete('content-length'); | |
} | |
// HTTP-redirect fetch step 15 | |
resolve(fetch(new Request(locationURL, requestOpts))); | |
finalize(); | |
return; | |
} | |
} | |
// prepare response | |
res.once('end', function () { | |
if (signal) signal.removeEventListener('abort', abortAndFinalize); | |
}); | |
let body = res.pipe(new PassThrough$1()); | |
const response_options = { | |
url: request.url, | |
status: res.statusCode, | |
statusText: res.statusMessage, | |
headers: headers, | |
size: request.size, | |
timeout: request.timeout, | |
counter: request.counter | |
}; | |
// HTTP-network fetch step 12.1.1.3 | |
const codings = headers.get('Content-Encoding'); | |
// HTTP-network fetch step 12.1.1.4: handle content codings | |
// in following scenarios we ignore compression support | |
// 1. compression support is disabled | |
// 2. HEAD request | |
// 3. no Content-Encoding header | |
// 4. no content response (204) | |
// 5. content not modified response (304) | |
if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { | |
response = new Response(body, response_options); | |
resolve(response); | |
return; | |
} | |
// For Node v6+ | |
// Be less strict when decoding compressed responses, since sometimes | |
// servers send slightly invalid responses that are still accepted | |
// by common browsers. | |
// Always using Z_SYNC_FLUSH is what cURL does. | |
const zlibOptions = { | |
flush: zlib.Z_SYNC_FLUSH, | |
finishFlush: zlib.Z_SYNC_FLUSH | |
}; | |
// for gzip | |
if (codings == 'gzip' || codings == 'x-gzip') { | |
body = body.pipe(zlib.createGunzip(zlibOptions)); | |
response = new Response(body, response_options); | |
resolve(response); | |
return; | |
} | |
// for deflate | |
if (codings == 'deflate' || codings == 'x-deflate') { | |
// handle the infamous raw deflate response from old servers | |
// a hack for old IIS and Apache servers | |
const raw = res.pipe(new PassThrough$1()); | |
raw.once('data', function (chunk) { | |
// see http://stackoverflow.com/questions/37519828 | |
if ((chunk[0] & 0x0F) === 0x08) { | |
body = body.pipe(zlib.createInflate()); | |
} else { | |
body = body.pipe(zlib.createInflateRaw()); | |
} | |
response = new Response(body, response_options); | |
resolve(response); | |
}); | |
return; | |
} | |
// for br | |
if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { | |
body = body.pipe(zlib.createBrotliDecompress()); | |
response = new Response(body, response_options); | |
resolve(response); | |
return; | |
} | |
// otherwise, use response as-is | |
response = new Response(body, response_options); | |
resolve(response); | |
}); | |
writeToStream(req, request); | |
}); | |
} | |
/** | |
* Redirect code matching | |
* | |
* @param Number code Status code | |
* @return Boolean | |
*/ | |
fetch.isRedirect = function (code) { | |
return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; | |
}; | |
// expose Promise | |
fetch.Promise = global.Promise; | |
module.exports = exports = fetch; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
exports.default = exports; | |
exports.Headers = Headers; | |
exports.Request = Request; | |
exports.Response = Response; | |
exports.FetchError = FetchError; | |
/***/ }), | |
/***/ 1223: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
var wrappy = __nccwpck_require__(2940) | |
module.exports = wrappy(once) | |
module.exports.strict = wrappy(onceStrict) | |
once.proto = once(function () { | |
Object.defineProperty(Function.prototype, 'once', { | |
value: function () { | |
return once(this) | |
}, | |
configurable: true | |
}) | |
Object.defineProperty(Function.prototype, 'onceStrict', { | |
value: function () { | |
return onceStrict(this) | |
}, | |
configurable: true | |
}) | |
}) | |
function once (fn) { | |
var f = function () { | |
if (f.called) return f.value | |
f.called = true | |
return f.value = fn.apply(this, arguments) | |
} | |
f.called = false | |
return f | |
} | |
function onceStrict (fn) { | |
var f = function () { | |
if (f.called) | |
throw new Error(f.onceError) | |
f.called = true | |
return f.value = fn.apply(this, arguments) | |
} | |
var name = fn.name || 'Function wrapped with `once`' | |
f.onceError = name + " shouldn't be called more than once" | |
f.called = false | |
return f | |
} | |
/***/ }), | |
/***/ 8714: | |
/***/ ((module) => { | |
"use strict"; | |
function posix(path) { | |
return path.charAt(0) === '/'; | |
} | |
function win32(path) { | |
// https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 | |
var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; | |
var result = splitDeviceRe.exec(path); | |
var device = result[1] || ''; | |
var isUnc = Boolean(device && device.charAt(1) !== ':'); | |
// UNC paths are always absolute | |
return Boolean(result[2] || isUnc); | |
} | |
module.exports = process.platform === 'win32' ? win32 : posix; | |
module.exports.posix = posix; | |
module.exports.win32 = win32; | |
/***/ }), | |
/***/ 4959: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const assert = __nccwpck_require__(2357) | |
const path = __nccwpck_require__(5622) | |
const fs = __nccwpck_require__(5747) | |
let glob = undefined | |
try { | |
glob = __nccwpck_require__(1957) | |
} catch (_err) { | |
// treat glob as optional. | |
} | |
const defaultGlobOpts = { | |
nosort: true, | |
silent: true | |
} | |
// for EMFILE handling | |
let timeout = 0 | |
const isWindows = (process.platform === "win32") | |
const defaults = options => { | |
const methods = [ | |
'unlink', | |
'chmod', | |
'stat', | |
'lstat', | |
'rmdir', | |
'readdir' | |
] | |
methods.forEach(m => { | |
options[m] = options[m] || fs[m] | |
m = m + 'Sync' | |
options[m] = options[m] || fs[m] | |
}) | |
options.maxBusyTries = options.maxBusyTries || 3 | |
options.emfileWait = options.emfileWait || 1000 | |
if (options.glob === false) { | |
options.disableGlob = true | |
} | |
if (options.disableGlob !== true && glob === undefined) { | |
throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') | |
} | |
options.disableGlob = options.disableGlob || false | |
options.glob = options.glob || defaultGlobOpts | |
} | |
const rimraf = (p, options, cb) => { | |
if (typeof options === 'function') { | |
cb = options | |
options = {} | |
} | |
assert(p, 'rimraf: missing path') | |
assert.equal(typeof p, 'string', 'rimraf: path should be a string') | |
assert.equal(typeof cb, 'function', 'rimraf: callback function required') | |
assert(options, 'rimraf: invalid options argument provided') | |
assert.equal(typeof options, 'object', 'rimraf: options should be object') | |
defaults(options) | |
let busyTries = 0 | |
let errState = null | |
let n = 0 | |
const next = (er) => { | |
errState = errState || er | |
if (--n === 0) | |
cb(errState) | |
} | |
const afterGlob = (er, results) => { | |
if (er) | |
return cb(er) | |
n = results.length | |
if (n === 0) | |
return cb() | |
results.forEach(p => { | |
const CB = (er) => { | |
if (er) { | |
if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && | |
busyTries < options.maxBusyTries) { | |
busyTries ++ | |
// try again, with the same exact callback as this one. | |
return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) | |
} | |
// this one won't happen if graceful-fs is used. | |
if (er.code === "EMFILE" && timeout < options.emfileWait) { | |
return setTimeout(() => rimraf_(p, options, CB), timeout ++) | |
} | |
// already gone | |
if (er.code === "ENOENT") er = null | |
} | |
timeout = 0 | |
next(er) | |
} | |
rimraf_(p, options, CB) | |
}) | |
} | |
if (options.disableGlob || !glob.hasMagic(p)) | |
return afterGlob(null, [p]) | |
options.lstat(p, (er, stat) => { | |
if (!er) | |
return afterGlob(null, [p]) | |
glob(p, options.glob, afterGlob) | |
}) | |
} | |
// Two possible strategies. | |
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR | |
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR | |
// | |
// Both result in an extra syscall when you guess wrong. However, there | |
// are likely far more normal files in the world than directories. This | |
// is based on the assumption that a the average number of files per | |
// directory is >= 1. | |
// | |
// If anyone ever complains about this, then I guess the strategy could | |
// be made configurable somehow. But until then, YAGNI. | |
const rimraf_ = (p, options, cb) => { | |
assert(p) | |
assert(options) | |
assert(typeof cb === 'function') | |
// sunos lets the root user unlink directories, which is... weird. | |
// so we have to lstat here and make sure it's not a dir. | |
options.lstat(p, (er, st) => { | |
if (er && er.code === "ENOENT") | |
return cb(null) | |
// Windows can EPERM on stat. Life is suffering. | |
if (er && er.code === "EPERM" && isWindows) | |
fixWinEPERM(p, options, er, cb) | |
if (st && st.isDirectory()) | |
return rmdir(p, options, er, cb) | |
options.unlink(p, er => { | |
if (er) { | |
if (er.code === "ENOENT") | |
return cb(null) | |
if (er.code === "EPERM") | |
return (isWindows) | |
? fixWinEPERM(p, options, er, cb) | |
: rmdir(p, options, er, cb) | |
if (er.code === "EISDIR") | |
return rmdir(p, options, er, cb) | |
} | |
return cb(er) | |
}) | |
}) | |
} | |
const fixWinEPERM = (p, options, er, cb) => { | |
assert(p) | |
assert(options) | |
assert(typeof cb === 'function') | |
options.chmod(p, 0o666, er2 => { | |
if (er2) | |
cb(er2.code === "ENOENT" ? null : er) | |
else | |
options.stat(p, (er3, stats) => { | |
if (er3) | |
cb(er3.code === "ENOENT" ? null : er) | |
else if (stats.isDirectory()) | |
rmdir(p, options, er, cb) | |
else | |
options.unlink(p, cb) | |
}) | |
}) | |
} | |
const fixWinEPERMSync = (p, options, er) => { | |
assert(p) | |
assert(options) | |
try { | |
options.chmodSync(p, 0o666) | |
} catch (er2) { | |
if (er2.code === "ENOENT") | |
return | |
else | |
throw er | |
} | |
let stats | |
try { | |
stats = options.statSync(p) | |
} catch (er3) { | |
if (er3.code === "ENOENT") | |
return | |
else | |
throw er | |
} | |
if (stats.isDirectory()) | |
rmdirSync(p, options, er) | |
else | |
options.unlinkSync(p) | |
} | |
const rmdir = (p, options, originalEr, cb) => { | |
assert(p) | |
assert(options) | |
assert(typeof cb === 'function') | |
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) | |
// if we guessed wrong, and it's not a directory, then | |
// raise the original error. | |
options.rmdir(p, er => { | |
if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) | |
rmkids(p, options, cb) | |
else if (er && er.code === "ENOTDIR") | |
cb(originalEr) | |
else | |
cb(er) | |
}) | |
} | |
const rmkids = (p, options, cb) => { | |
assert(p) | |
assert(options) | |
assert(typeof cb === 'function') | |
options.readdir(p, (er, files) => { | |
if (er) | |
return cb(er) | |
let n = files.length | |
if (n === 0) | |
return options.rmdir(p, cb) | |
let errState | |
files.forEach(f => { | |
rimraf(path.join(p, f), options, er => { | |
if (errState) | |
return | |
if (er) | |
return cb(errState = er) | |
if (--n === 0) | |
options.rmdir(p, cb) | |
}) | |
}) | |
}) | |
} | |
// this looks simpler, and is strictly *faster*, but will | |
// tie up the JavaScript thread and fail on excessively | |
// deep directory trees. | |
const rimrafSync = (p, options) => { | |
options = options || {} | |
defaults(options) | |
assert(p, 'rimraf: missing path') | |
assert.equal(typeof p, 'string', 'rimraf: path should be a string') | |
assert(options, 'rimraf: missing options') | |
assert.equal(typeof options, 'object', 'rimraf: options should be object') | |
let results | |
if (options.disableGlob || !glob.hasMagic(p)) { | |
results = [p] | |
} else { | |
try { | |
options.lstatSync(p) | |
results = [p] | |
} catch (er) { | |
results = glob.sync(p, options.glob) | |
} | |
} | |
if (!results.length) | |
return | |
for (let i = 0; i < results.length; i++) { | |
const p = results[i] | |
let st | |
try { | |
st = options.lstatSync(p) | |
} catch (er) { | |
if (er.code === "ENOENT") | |
return | |
// Windows can EPERM on stat. Life is suffering. | |
if (er.code === "EPERM" && isWindows) | |
fixWinEPERMSync(p, options, er) | |
} | |
try { | |
// sunos lets the root user unlink directories, which is... weird. | |
if (st && st.isDirectory()) | |
rmdirSync(p, options, null) | |
else | |
options.unlinkSync(p) | |
} catch (er) { | |
if (er.code === "ENOENT") | |
return | |
if (er.code === "EPERM") | |
return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) | |
if (er.code !== "EISDIR") | |
throw er | |
rmdirSync(p, options, er) | |
} | |
} | |
} | |
const rmdirSync = (p, options, originalEr) => { | |
assert(p) | |
assert(options) | |
try { | |
options.rmdirSync(p) | |
} catch (er) { | |
if (er.code === "ENOENT") | |
return | |
if (er.code === "ENOTDIR") | |
throw originalEr | |
if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") | |
rmkidsSync(p, options) | |
} | |
} | |
const rmkidsSync = (p, options) => { | |
assert(p) | |
assert(options) | |
options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) | |
// We only end up here once we got ENOTEMPTY at least once, and | |
// at this point, we are guaranteed to have removed all the kids. | |
// So, we know that it won't be ENOENT or ENOTDIR or anything else. | |
// try really hard to delete stuff on windows, because it has a | |
// PROFOUNDLY annoying habit of not closing handles promptly when | |
// files are deleted, resulting in spurious ENOTEMPTY errors. | |
const retries = isWindows ? 100 : 1 | |
let i = 0 | |
do { | |
let threw = true | |
try { | |
const ret = options.rmdirSync(p, options) | |
threw = false | |
return ret | |
} finally { | |
if (++i < retries && threw) | |
continue | |
} | |
} while (true) | |
} | |
module.exports = rimraf | |
rimraf.sync = rimrafSync | |
/***/ }), | |
/***/ 1532: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const ANY = Symbol('SemVer ANY') | |
// hoisted class for cyclic dependency | |
class Comparator { | |
static get ANY () { | |
return ANY | |
} | |
constructor (comp, options) { | |
options = parseOptions(options) | |
if (comp instanceof Comparator) { | |
if (comp.loose === !!options.loose) { | |
return comp | |
} else { | |
comp = comp.value | |
} | |
} | |
debug('comparator', comp, options) | |
this.options = options | |
this.loose = !!options.loose | |
this.parse(comp) | |
if (this.semver === ANY) { | |
this.value = '' | |
} else { | |
this.value = this.operator + this.semver.version | |
} | |
debug('comp', this) | |
} | |
parse (comp) { | |
const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] | |
const m = comp.match(r) | |
if (!m) { | |
throw new TypeError(`Invalid comparator: ${comp}`) | |
} | |
this.operator = m[1] !== undefined ? m[1] : '' | |
if (this.operator === '=') { | |
this.operator = '' | |
} | |
// if it literally is just '>' or '' then allow anything. | |
if (!m[2]) { | |
this.semver = ANY | |
} else { | |
this.semver = new SemVer(m[2], this.options.loose) | |
} | |
} | |
toString () { | |
return this.value | |
} | |
test (version) { | |
debug('Comparator.test', version, this.options.loose) | |
if (this.semver === ANY || version === ANY) { | |
return true | |
} | |
if (typeof version === 'string') { | |
try { | |
version = new SemVer(version, this.options) | |
} catch (er) { | |
return false | |
} | |
} | |
return cmp(version, this.operator, this.semver, this.options) | |
} | |
intersects (comp, options) { | |
if (!(comp instanceof Comparator)) { | |
throw new TypeError('a Comparator is required') | |
} | |
if (!options || typeof options !== 'object') { | |
options = { | |
loose: !!options, | |
includePrerelease: false | |
} | |
} | |
if (this.operator === '') { | |
if (this.value === '') { | |
return true | |
} | |
return new Range(comp.value, options).test(this.value) | |
} else if (comp.operator === '') { | |
if (comp.value === '') { | |
return true | |
} | |
return new Range(this.value, options).test(comp.semver) | |
} | |
const sameDirectionIncreasing = | |
(this.operator === '>=' || this.operator === '>') && | |
(comp.operator === '>=' || comp.operator === '>') | |
const sameDirectionDecreasing = | |
(this.operator === '<=' || this.operator === '<') && | |
(comp.operator === '<=' || comp.operator === '<') | |
const sameSemVer = this.semver.version === comp.semver.version | |
const differentDirectionsInclusive = | |
(this.operator === '>=' || this.operator === '<=') && | |
(comp.operator === '>=' || comp.operator === '<=') | |
const oppositeDirectionsLessThan = | |
cmp(this.semver, '<', comp.semver, options) && | |
(this.operator === '>=' || this.operator === '>') && | |
(comp.operator === '<=' || comp.operator === '<') | |
const oppositeDirectionsGreaterThan = | |
cmp(this.semver, '>', comp.semver, options) && | |
(this.operator === '<=' || this.operator === '<') && | |
(comp.operator === '>=' || comp.operator === '>') | |
return ( | |
sameDirectionIncreasing || | |
sameDirectionDecreasing || | |
(sameSemVer && differentDirectionsInclusive) || | |
oppositeDirectionsLessThan || | |
oppositeDirectionsGreaterThan | |
) | |
} | |
} | |
module.exports = Comparator | |
const parseOptions = __nccwpck_require__(785) | |
const {re, t} = __nccwpck_require__(9523) | |
const cmp = __nccwpck_require__(5098) | |
const debug = __nccwpck_require__(427) | |
const SemVer = __nccwpck_require__(8088) | |
const Range = __nccwpck_require__(9828) | |
/***/ }), | |
/***/ 9828: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
// hoisted class for cyclic dependency | |
class Range { | |
constructor (range, options) { | |
options = parseOptions(options) | |
if (range instanceof Range) { | |
if ( | |
range.loose === !!options.loose && | |
range.includePrerelease === !!options.includePrerelease | |
) { | |
return range | |
} else { | |
return new Range(range.raw, options) | |
} | |
} | |
if (range instanceof Comparator) { | |
// just put it in the set and return | |
this.raw = range.value | |
this.set = [[range]] | |
this.format() | |
return this | |
} | |
this.options = options | |
this.loose = !!options.loose | |
this.includePrerelease = !!options.includePrerelease | |
// First, split based on boolean or || | |
this.raw = range | |
this.set = range | |
.split(/\s*\|\|\s*/) | |
// map the range to a 2d array of comparators | |
.map(range => this.parseRange(range.trim())) | |
// throw out any comparator lists that are empty | |
// this generally means that it was not a valid range, which is allowed | |
// in loose mode, but will still throw if the WHOLE range is invalid. | |
.filter(c => c.length) | |
if (!this.set.length) { | |
throw new TypeError(`Invalid SemVer Range: ${range}`) | |
} | |
// if we have any that are not the null set, throw out null sets. | |
if (this.set.length > 1) { | |
// keep the first one, in case they're all null sets | |
const first = this.set[0] | |
this.set = this.set.filter(c => !isNullSet(c[0])) | |
if (this.set.length === 0) | |
this.set = [first] | |
else if (this.set.length > 1) { | |
// if we have any that are *, then the range is just * | |
for (const c of this.set) { | |
if (c.length === 1 && isAny(c[0])) { | |
this.set = [c] | |
break | |
} | |
} | |
} | |
} | |
this.format() | |
} | |
format () { | |
this.range = this.set | |
.map((comps) => { | |
return comps.join(' ').trim() | |
}) | |
.join('||') | |
.trim() | |
return this.range | |
} | |
toString () { | |
return this.range | |
} | |
parseRange (range) { | |
range = range.trim() | |
// memoize range parsing for performance. | |
// this is a very hot path, and fully deterministic. | |
const memoOpts = Object.keys(this.options).join(',') | |
const memoKey = `parseRange:${memoOpts}:${range}` | |
const cached = cache.get(memoKey) | |
if (cached) | |
return cached | |
const loose = this.options.loose | |
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` | |
const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] | |
range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) | |
debug('hyphen replace', range) | |
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` | |
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) | |
debug('comparator trim', range, re[t.COMPARATORTRIM]) | |
// `~ 1.2.3` => `~1.2.3` | |
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) | |
// `^ 1.2.3` => `^1.2.3` | |
range = range.replace(re[t.CARETTRIM], caretTrimReplace) | |
// normalize spaces | |
range = range.split(/\s+/).join(' ') | |
// At this point, the range is completely trimmed and | |
// ready to be split into comparators. | |
const compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] | |
const rangeList = range | |
.split(' ') | |
.map(comp => parseComparator(comp, this.options)) | |
.join(' ') | |
.split(/\s+/) | |
// >=0.0.0 is equivalent to * | |
.map(comp => replaceGTE0(comp, this.options)) | |
// in loose mode, throw out any that are not valid comparators | |
.filter(this.options.loose ? comp => !!comp.match(compRe) : () => true) | |
.map(comp => new Comparator(comp, this.options)) | |
// if any comparators are the null set, then replace with JUST null set | |
// if more than one comparator, remove any * comparators | |
// also, don't include the same comparator more than once | |
const l = rangeList.length | |
const rangeMap = new Map() | |
for (const comp of rangeList) { | |
if (isNullSet(comp)) | |
return [comp] | |
rangeMap.set(comp.value, comp) | |
} | |
if (rangeMap.size > 1 && rangeMap.has('')) | |
rangeMap.delete('') | |
const result = [...rangeMap.values()] | |
cache.set(memoKey, result) | |
return result | |
} | |
intersects (range, options) { | |
if (!(range instanceof Range)) { | |
throw new TypeError('a Range is required') | |
} | |
return this.set.some((thisComparators) => { | |
return ( | |
isSatisfiable(thisComparators, options) && | |
range.set.some((rangeComparators) => { | |
return ( | |
isSatisfiable(rangeComparators, options) && | |
thisComparators.every((thisComparator) => { | |
return rangeComparators.every((rangeComparator) => { | |
return thisComparator.intersects(rangeComparator, options) | |
}) | |
}) | |
) | |
}) | |
) | |
}) | |
} | |
// if ANY of the sets match ALL of its comparators, then pass | |
test (version) { | |
if (!version) { | |
return false | |
} | |
if (typeof version === 'string') { | |
try { | |
version = new SemVer(version, this.options) | |
} catch (er) { | |
return false | |
} | |
} | |
for (let i = 0; i < this.set.length; i++) { | |
if (testSet(this.set[i], version, this.options)) { | |
return true | |
} | |
} | |
return false | |
} | |
} | |
module.exports = Range | |
const LRU = __nccwpck_require__(7129) | |
const cache = new LRU({ max: 1000 }) | |
const parseOptions = __nccwpck_require__(785) | |
const Comparator = __nccwpck_require__(1532) | |
const debug = __nccwpck_require__(427) | |
const SemVer = __nccwpck_require__(8088) | |
const { | |
re, | |
t, | |
comparatorTrimReplace, | |
tildeTrimReplace, | |
caretTrimReplace | |
} = __nccwpck_require__(9523) | |
const isNullSet = c => c.value === '<0.0.0-0' | |
const isAny = c => c.value === '' | |
// take a set of comparators and determine whether there | |
// exists a version which can satisfy it | |
const isSatisfiable = (comparators, options) => { | |
let result = true | |
const remainingComparators = comparators.slice() | |
let testComparator = remainingComparators.pop() | |
while (result && remainingComparators.length) { | |
result = remainingComparators.every((otherComparator) => { | |
return testComparator.intersects(otherComparator, options) | |
}) | |
testComparator = remainingComparators.pop() | |
} | |
return result | |
} | |
// comprised of xranges, tildes, stars, and gtlt's at this point. | |
// already replaced the hyphen ranges | |
// turn into a set of JUST comparators. | |
const parseComparator = (comp, options) => { | |
debug('comp', comp, options) | |
comp = replaceCarets(comp, options) | |
debug('caret', comp) | |
comp = replaceTildes(comp, options) | |
debug('tildes', comp) | |
comp = replaceXRanges(comp, options) | |
debug('xrange', comp) | |
comp = replaceStars(comp, options) | |
debug('stars', comp) | |
return comp | |
} | |
const isX = id => !id || id.toLowerCase() === 'x' || id === '*' | |
// ~, ~> --> * (any, kinda silly) | |
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 | |
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 | |
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 | |
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 | |
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 | |
const replaceTildes = (comp, options) => | |
comp.trim().split(/\s+/).map((comp) => { | |
return replaceTilde(comp, options) | |
}).join(' ') | |
const replaceTilde = (comp, options) => { | |
const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] | |
return comp.replace(r, (_, M, m, p, pr) => { | |
debug('tilde', comp, _, M, m, p, pr) | |
let ret | |
if (isX(M)) { | |
ret = '' | |
} else if (isX(m)) { | |
ret = `>=${M}.0.0 <${+M + 1}.0.0-0` | |
} else if (isX(p)) { | |
// ~1.2 == >=1.2.0 <1.3.0-0 | |
ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` | |
} else if (pr) { | |
debug('replaceTilde pr', pr) | |
ret = `>=${M}.${m}.${p}-${pr | |
} <${M}.${+m + 1}.0-0` | |
} else { | |
// ~1.2.3 == >=1.2.3 <1.3.0-0 | |
ret = `>=${M}.${m}.${p | |
} <${M}.${+m + 1}.0-0` | |
} | |
debug('tilde return', ret) | |
return ret | |
}) | |
} | |
// ^ --> * (any, kinda silly) | |
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 | |
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 | |
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 | |
// ^1.2.3 --> >=1.2.3 <2.0.0-0 | |
// ^1.2.0 --> >=1.2.0 <2.0.0-0 | |
const replaceCarets = (comp, options) => | |
comp.trim().split(/\s+/).map((comp) => { | |
return replaceCaret(comp, options) | |
}).join(' ') | |
const replaceCaret = (comp, options) => { | |
debug('caret', comp, options) | |
const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] | |
const z = options.includePrerelease ? '-0' : '' | |
return comp.replace(r, (_, M, m, p, pr) => { | |
debug('caret', comp, _, M, m, p, pr) | |
let ret | |
if (isX(M)) { | |
ret = '' | |
} else if (isX(m)) { | |
ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` | |
} else if (isX(p)) { | |
if (M === '0') { | |
ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` | |
} else { | |
ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` | |
} | |
} else if (pr) { | |
debug('replaceCaret pr', pr) | |
if (M === '0') { | |
if (m === '0') { | |
ret = `>=${M}.${m}.${p}-${pr | |
} <${M}.${m}.${+p + 1}-0` | |
} else { | |
ret = `>=${M}.${m}.${p}-${pr | |
} <${M}.${+m + 1}.0-0` | |
} | |
} else { | |
ret = `>=${M}.${m}.${p}-${pr | |
} <${+M + 1}.0.0-0` | |
} | |
} else { | |
debug('no pr') | |
if (M === '0') { | |
if (m === '0') { | |
ret = `>=${M}.${m}.${p | |
}${z} <${M}.${m}.${+p + 1}-0` | |
} else { | |
ret = `>=${M}.${m}.${p | |
}${z} <${M}.${+m + 1}.0-0` | |
} | |
} else { | |
ret = `>=${M}.${m}.${p | |
} <${+M + 1}.0.0-0` | |
} | |
} | |
debug('caret return', ret) | |
return ret | |
}) | |
} | |
const replaceXRanges = (comp, options) => { | |
debug('replaceXRanges', comp, options) | |
return comp.split(/\s+/).map((comp) => { | |
return replaceXRange(comp, options) | |
}).join(' ') | |
} | |
const replaceXRange = (comp, options) => { | |
comp = comp.trim() | |
const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] | |
return comp.replace(r, (ret, gtlt, M, m, p, pr) => { | |
debug('xRange', comp, ret, gtlt, M, m, p, pr) | |
const xM = isX(M) | |
const xm = xM || isX(m) | |
const xp = xm || isX(p) | |
const anyX = xp | |
if (gtlt === '=' && anyX) { | |
gtlt = '' | |
} | |
// if we're including prereleases in the match, then we need | |
// to fix this to -0, the lowest possible prerelease value | |
pr = options.includePrerelease ? '-0' : '' | |
if (xM) { | |
if (gtlt === '>' || gtlt === '<') { | |
// nothing is allowed | |
ret = '<0.0.0-0' | |
} else { | |
// nothing is forbidden | |
ret = '*' | |
} | |
} else if (gtlt && anyX) { | |
// we know patch is an x, because we have any x at all. | |
// replace X with 0 | |
if (xm) { | |
m = 0 | |
} | |
p = 0 | |
if (gtlt === '>') { | |
// >1 => >=2.0.0 | |
// >1.2 => >=1.3.0 | |
gtlt = '>=' | |
if (xm) { | |
M = +M + 1 | |
m = 0 | |
p = 0 | |
} else { | |
m = +m + 1 | |
p = 0 | |
} | |
} else if (gtlt === '<=') { | |
// <=0.7.x is actually <0.8.0, since any 0.7.x should | |
// pass. Similarly, <=7.x is actually <8.0.0, etc. | |
gtlt = '<' | |
if (xm) { | |
M = +M + 1 | |
} else { | |
m = +m + 1 | |
} | |
} | |
if (gtlt === '<') | |
pr = '-0' | |
ret = `${gtlt + M}.${m}.${p}${pr}` | |
} else if (xm) { | |
ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` | |
} else if (xp) { | |
ret = `>=${M}.${m}.0${pr | |
} <${M}.${+m + 1}.0-0` | |
} | |
debug('xRange return', ret) | |
return ret | |
}) | |
} | |
// Because * is AND-ed with everything else in the comparator, | |
// and '' means "any version", just remove the *s entirely. | |
const replaceStars = (comp, options) => { | |
debug('replaceStars', comp, options) | |
// Looseness is ignored here. star is always as loose as it gets! | |
return comp.trim().replace(re[t.STAR], '') | |
} | |
const replaceGTE0 = (comp, options) => { | |
debug('replaceGTE0', comp, options) | |
return comp.trim() | |
.replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') | |
} | |
// This function is passed to string.replace(re[t.HYPHENRANGE]) | |
// M, m, patch, prerelease, build | |
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 | |
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do | |
// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 | |
const hyphenReplace = incPr => ($0, | |
from, fM, fm, fp, fpr, fb, | |
to, tM, tm, tp, tpr, tb) => { | |
if (isX(fM)) { | |
from = '' | |
} else if (isX(fm)) { | |
from = `>=${fM}.0.0${incPr ? '-0' : ''}` | |
} else if (isX(fp)) { | |
from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` | |
} else if (fpr) { | |
from = `>=${from}` | |
} else { | |
from = `>=${from}${incPr ? '-0' : ''}` | |
} | |
if (isX(tM)) { | |
to = '' | |
} else if (isX(tm)) { | |
to = `<${+tM + 1}.0.0-0` | |
} else if (isX(tp)) { | |
to = `<${tM}.${+tm + 1}.0-0` | |
} else if (tpr) { | |
to = `<=${tM}.${tm}.${tp}-${tpr}` | |
} else if (incPr) { | |
to = `<${tM}.${tm}.${+tp + 1}-0` | |
} else { | |
to = `<=${to}` | |
} | |
return (`${from} ${to}`).trim() | |
} | |
const testSet = (set, version, options) => { | |
for (let i = 0; i < set.length; i++) { | |
if (!set[i].test(version)) { | |
return false | |
} | |
} | |
if (version.prerelease.length && !options.includePrerelease) { | |
// Find the set of versions that are allowed to have prereleases | |
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 | |
// That should allow `1.2.3-pr.2` to pass. | |
// However, `1.2.4-alpha.notready` should NOT be allowed, | |
// even though it's within the range set by the comparators. | |
for (let i = 0; i < set.length; i++) { | |
debug(set[i].semver) | |
if (set[i].semver === Comparator.ANY) { | |
continue | |
} | |
if (set[i].semver.prerelease.length > 0) { | |
const allowed = set[i].semver | |
if (allowed.major === version.major && | |
allowed.minor === version.minor && | |
allowed.patch === version.patch) { | |
return true | |
} | |
} | |
} | |
// Version has a -pre, but it's not one of the ones we like. | |
return false | |
} | |
return true | |
} | |
/***/ }), | |
/***/ 8088: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const debug = __nccwpck_require__(427) | |
const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(2293) | |
const { re, t } = __nccwpck_require__(9523) | |
const parseOptions = __nccwpck_require__(785) | |
const { compareIdentifiers } = __nccwpck_require__(2463) | |
class SemVer { | |
constructor (version, options) { | |
options = parseOptions(options) | |
if (version instanceof SemVer) { | |
if (version.loose === !!options.loose && | |
version.includePrerelease === !!options.includePrerelease) { | |
return version | |
} else { | |
version = version.version | |
} | |
} else if (typeof version !== 'string') { | |
throw new TypeError(`Invalid Version: ${version}`) | |
} | |
if (version.length > MAX_LENGTH) { | |
throw new TypeError( | |
`version is longer than ${MAX_LENGTH} characters` | |
) | |
} | |
debug('SemVer', version, options) | |
this.options = options | |
this.loose = !!options.loose | |
// this isn't actually relevant for versions, but keep it so that we | |
// don't run into trouble passing this.options around. | |
this.includePrerelease = !!options.includePrerelease | |
const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) | |
if (!m) { | |
throw new TypeError(`Invalid Version: ${version}`) | |
} | |
this.raw = version | |
// these are actually numbers | |
this.major = +m[1] | |
this.minor = +m[2] | |
this.patch = +m[3] | |
if (this.major > MAX_SAFE_INTEGER || this.major < 0) { | |
throw new TypeError('Invalid major version') | |
} | |
if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { | |
throw new TypeError('Invalid minor version') | |
} | |
if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { | |
throw new TypeError('Invalid patch version') | |
} | |
// numberify any prerelease numeric ids | |
if (!m[4]) { | |
this.prerelease = [] | |
} else { | |
this.prerelease = m[4].split('.').map((id) => { | |
if (/^[0-9]+$/.test(id)) { | |
const num = +id | |
if (num >= 0 && num < MAX_SAFE_INTEGER) { | |
return num | |
} | |
} | |
return id | |
}) | |
} | |
this.build = m[5] ? m[5].split('.') : [] | |
this.format() | |
} | |
format () { | |
this.version = `${this.major}.${this.minor}.${this.patch}` | |
if (this.prerelease.length) { | |
this.version += `-${this.prerelease.join('.')}` | |
} | |
return this.version | |
} | |
toString () { | |
return this.version | |
} | |
compare (other) { | |
debug('SemVer.compare', this.version, this.options, other) | |
if (!(other instanceof SemVer)) { | |
if (typeof other === 'string' && other === this.version) { | |
return 0 | |
} | |
other = new SemVer(other, this.options) | |
} | |
if (other.version === this.version) { | |
return 0 | |
} | |
return this.compareMain(other) || this.comparePre(other) | |
} | |
compareMain (other) { | |
if (!(other instanceof SemVer)) { | |
other = new SemVer(other, this.options) | |
} | |
return ( | |
compareIdentifiers(this.major, other.major) || | |
compareIdentifiers(this.minor, other.minor) || | |
compareIdentifiers(this.patch, other.patch) | |
) | |
} | |
comparePre (other) { | |
if (!(other instanceof SemVer)) { | |
other = new SemVer(other, this.options) | |
} | |
// NOT having a prerelease is > having one | |
if (this.prerelease.length && !other.prerelease.length) { | |
return -1 | |
} else if (!this.prerelease.length && other.prerelease.length) { | |
return 1 | |
} else if (!this.prerelease.length && !other.prerelease.length) { | |
return 0 | |
} | |
let i = 0 | |
do { | |
const a = this.prerelease[i] | |
const b = other.prerelease[i] | |
debug('prerelease compare', i, a, b) | |
if (a === undefined && b === undefined) { | |
return 0 | |
} else if (b === undefined) { | |
return 1 | |
} else if (a === undefined) { | |
return -1 | |
} else if (a === b) { | |
continue | |
} else { | |
return compareIdentifiers(a, b) | |
} | |
} while (++i) | |
} | |
compareBuild (other) { | |
if (!(other instanceof SemVer)) { | |
other = new SemVer(other, this.options) | |
} | |
let i = 0 | |
do { | |
const a = this.build[i] | |
const b = other.build[i] | |
debug('prerelease compare', i, a, b) | |
if (a === undefined && b === undefined) { | |
return 0 | |
} else if (b === undefined) { | |
return 1 | |
} else if (a === undefined) { | |
return -1 | |
} else if (a === b) { | |
continue | |
} else { | |
return compareIdentifiers(a, b) | |
} | |
} while (++i) | |
} | |
// preminor will bump the version up to the next minor release, and immediately | |
// down to pre-release. premajor and prepatch work the same way. | |
inc (release, identifier) { | |
switch (release) { | |
case 'premajor': | |
this.prerelease.length = 0 | |
this.patch = 0 | |
this.minor = 0 | |
this.major++ | |
this.inc('pre', identifier) | |
break | |
case 'preminor': | |
this.prerelease.length = 0 | |
this.patch = 0 | |
this.minor++ | |
this.inc('pre', identifier) | |
break | |
case 'prepatch': | |
// If this is already a prerelease, it will bump to the next version | |
// drop any prereleases that might already exist, since they are not | |
// relevant at this point. | |
this.prerelease.length = 0 | |
this.inc('patch', identifier) | |
this.inc('pre', identifier) | |
break | |
// If the input is a non-prerelease version, this acts the same as | |
// prepatch. | |
case 'prerelease': | |
if (this.prerelease.length === 0) { | |
this.inc('patch', identifier) | |
} | |
this.inc('pre', identifier) | |
break | |
case 'major': | |
// If this is a pre-major version, bump up to the same major version. | |
// Otherwise increment major. | |
// 1.0.0-5 bumps to 1.0.0 | |
// 1.1.0 bumps to 2.0.0 | |
if ( | |
this.minor !== 0 || | |
this.patch !== 0 || | |
this.prerelease.length === 0 | |
) { | |
this.major++ | |
} | |
this.minor = 0 | |
this.patch = 0 | |
this.prerelease = [] | |
break | |
case 'minor': | |
// If this is a pre-minor version, bump up to the same minor version. | |
// Otherwise increment minor. | |
// 1.2.0-5 bumps to 1.2.0 | |
// 1.2.1 bumps to 1.3.0 | |
if (this.patch !== 0 || this.prerelease.length === 0) { | |
this.minor++ | |
} | |
this.patch = 0 | |
this.prerelease = [] | |
break | |
case 'patch': | |
// If this is not a pre-release version, it will increment the patch. | |
// If it is a pre-release it will bump up to the same patch version. | |
// 1.2.0-5 patches to 1.2.0 | |
// 1.2.0 patches to 1.2.1 | |
if (this.prerelease.length === 0) { | |
this.patch++ | |
} | |
this.prerelease = [] | |
break | |
// This probably shouldn't be used publicly. | |
// 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. | |
case 'pre': | |
if (this.prerelease.length === 0) { | |
this.prerelease = [0] | |
} else { | |
let i = this.prerelease.length | |
while (--i >= 0) { | |
if (typeof this.prerelease[i] === 'number') { | |
this.prerelease[i]++ | |
i = -2 | |
} | |
} | |
if (i === -1) { | |
// didn't increment anything | |
this.prerelease.push(0) | |
} | |
} | |
if (identifier) { | |
// 1.2.0-beta.1 bumps to 1.2.0-beta.2, | |
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 | |
if (this.prerelease[0] === identifier) { | |
if (isNaN(this.prerelease[1])) { | |
this.prerelease = [identifier, 0] | |
} | |
} else { | |
this.prerelease = [identifier, 0] | |
} | |
} | |
break | |
default: | |
throw new Error(`invalid increment argument: ${release}`) | |
} | |
this.format() | |
this.raw = this.version | |
return this | |
} | |
} | |
module.exports = SemVer | |
/***/ }), | |
/***/ 8848: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const parse = __nccwpck_require__(5925) | |
const clean = (version, options) => { | |
const s = parse(version.trim().replace(/^[=v]+/, ''), options) | |
return s ? s.version : null | |
} | |
module.exports = clean | |
/***/ }), | |
/***/ 5098: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const eq = __nccwpck_require__(1898) | |
const neq = __nccwpck_require__(6017) | |
const gt = __nccwpck_require__(4123) | |
const gte = __nccwpck_require__(5522) | |
const lt = __nccwpck_require__(194) | |
const lte = __nccwpck_require__(7520) | |
const cmp = (a, op, b, loose) => { | |
switch (op) { | |
case '===': | |
if (typeof a === 'object') | |
a = a.version | |
if (typeof b === 'object') | |
b = b.version | |
return a === b | |
case '!==': | |
if (typeof a === 'object') | |
a = a.version | |
if (typeof b === 'object') | |
b = b.version | |
return a !== b | |
case '': | |
case '=': | |
case '==': | |
return eq(a, b, loose) | |
case '!=': | |
return neq(a, b, loose) | |
case '>': | |
return gt(a, b, loose) | |
case '>=': | |
return gte(a, b, loose) | |
case '<': | |
return lt(a, b, loose) | |
case '<=': | |
return lte(a, b, loose) | |
default: | |
throw new TypeError(`Invalid operator: ${op}`) | |
} | |
} | |
module.exports = cmp | |
/***/ }), | |
/***/ 3466: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const parse = __nccwpck_require__(5925) | |
const {re, t} = __nccwpck_require__(9523) | |
const coerce = (version, options) => { | |
if (version instanceof SemVer) { | |
return version | |
} | |
if (typeof version === 'number') { | |
version = String(version) | |
} | |
if (typeof version !== 'string') { | |
return null | |
} | |
options = options || {} | |
let match = null | |
if (!options.rtl) { | |
match = version.match(re[t.COERCE]) | |
} else { | |
// Find the right-most coercible string that does not share | |
// a terminus with a more left-ward coercible string. | |
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' | |
// | |
// Walk through the string checking with a /g regexp | |
// Manually set the index so as to pick up overlapping matches. | |
// Stop when we get a match that ends at the string end, since no | |
// coercible string can be more right-ward without the same terminus. | |
let next | |
while ((next = re[t.COERCERTL].exec(version)) && | |
(!match || match.index + match[0].length !== version.length) | |
) { | |
if (!match || | |
next.index + next[0].length !== match.index + match[0].length) { | |
match = next | |
} | |
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length | |
} | |
// leave it in a clean state | |
re[t.COERCERTL].lastIndex = -1 | |
} | |
if (match === null) | |
return null | |
return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) | |
} | |
module.exports = coerce | |
/***/ }), | |
/***/ 2156: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const compareBuild = (a, b, loose) => { | |
const versionA = new SemVer(a, loose) | |
const versionB = new SemVer(b, loose) | |
return versionA.compare(versionB) || versionA.compareBuild(versionB) | |
} | |
module.exports = compareBuild | |
/***/ }), | |
/***/ 2804: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const compareLoose = (a, b) => compare(a, b, true) | |
module.exports = compareLoose | |
/***/ }), | |
/***/ 4309: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const compare = (a, b, loose) => | |
new SemVer(a, loose).compare(new SemVer(b, loose)) | |
module.exports = compare | |
/***/ }), | |
/***/ 4297: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const parse = __nccwpck_require__(5925) | |
const eq = __nccwpck_require__(1898) | |
const diff = (version1, version2) => { | |
if (eq(version1, version2)) { | |
return null | |
} else { | |
const v1 = parse(version1) | |
const v2 = parse(version2) | |
const hasPre = v1.prerelease.length || v2.prerelease.length | |
const prefix = hasPre ? 'pre' : '' | |
const defaultResult = hasPre ? 'prerelease' : '' | |
for (const key in v1) { | |
if (key === 'major' || key === 'minor' || key === 'patch') { | |
if (v1[key] !== v2[key]) { | |
return prefix + key | |
} | |
} | |
} | |
return defaultResult // may be undefined | |
} | |
} | |
module.exports = diff | |
/***/ }), | |
/***/ 1898: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const eq = (a, b, loose) => compare(a, b, loose) === 0 | |
module.exports = eq | |
/***/ }), | |
/***/ 4123: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const gt = (a, b, loose) => compare(a, b, loose) > 0 | |
module.exports = gt | |
/***/ }), | |
/***/ 5522: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const gte = (a, b, loose) => compare(a, b, loose) >= 0 | |
module.exports = gte | |
/***/ }), | |
/***/ 900: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const inc = (version, release, options, identifier) => { | |
if (typeof (options) === 'string') { | |
identifier = options | |
options = undefined | |
} | |
try { | |
return new SemVer(version, options).inc(release, identifier).version | |
} catch (er) { | |
return null | |
} | |
} | |
module.exports = inc | |
/***/ }), | |
/***/ 194: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const lt = (a, b, loose) => compare(a, b, loose) < 0 | |
module.exports = lt | |
/***/ }), | |
/***/ 7520: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const lte = (a, b, loose) => compare(a, b, loose) <= 0 | |
module.exports = lte | |
/***/ }), | |
/***/ 6688: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const major = (a, loose) => new SemVer(a, loose).major | |
module.exports = major | |
/***/ }), | |
/***/ 8447: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const minor = (a, loose) => new SemVer(a, loose).minor | |
module.exports = minor | |
/***/ }), | |
/***/ 6017: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const neq = (a, b, loose) => compare(a, b, loose) !== 0 | |
module.exports = neq | |
/***/ }), | |
/***/ 5925: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const {MAX_LENGTH} = __nccwpck_require__(2293) | |
const { re, t } = __nccwpck_require__(9523) | |
const SemVer = __nccwpck_require__(8088) | |
const parseOptions = __nccwpck_require__(785) | |
const parse = (version, options) => { | |
options = parseOptions(options) | |
if (version instanceof SemVer) { | |
return version | |
} | |
if (typeof version !== 'string') { | |
return null | |
} | |
if (version.length > MAX_LENGTH) { | |
return null | |
} | |
const r = options.loose ? re[t.LOOSE] : re[t.FULL] | |
if (!r.test(version)) { | |
return null | |
} | |
try { | |
return new SemVer(version, options) | |
} catch (er) { | |
return null | |
} | |
} | |
module.exports = parse | |
/***/ }), | |
/***/ 2866: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const patch = (a, loose) => new SemVer(a, loose).patch | |
module.exports = patch | |
/***/ }), | |
/***/ 4016: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const parse = __nccwpck_require__(5925) | |
const prerelease = (version, options) => { | |
const parsed = parse(version, options) | |
return (parsed && parsed.prerelease.length) ? parsed.prerelease : null | |
} | |
module.exports = prerelease | |
/***/ }), | |
/***/ 6417: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compare = __nccwpck_require__(4309) | |
const rcompare = (a, b, loose) => compare(b, a, loose) | |
module.exports = rcompare | |
/***/ }), | |
/***/ 8701: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compareBuild = __nccwpck_require__(2156) | |
const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) | |
module.exports = rsort | |
/***/ }), | |
/***/ 6055: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const Range = __nccwpck_require__(9828) | |
const satisfies = (version, range, options) => { | |
try { | |
range = new Range(range, options) | |
} catch (er) { | |
return false | |
} | |
return range.test(version) | |
} | |
module.exports = satisfies | |
/***/ }), | |
/***/ 1426: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const compareBuild = __nccwpck_require__(2156) | |
const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) | |
module.exports = sort | |
/***/ }), | |
/***/ 9601: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const parse = __nccwpck_require__(5925) | |
const valid = (version, options) => { | |
const v = parse(version, options) | |
return v ? v.version : null | |
} | |
module.exports = valid | |
/***/ }), | |
/***/ 1383: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
// just pre-load all the stuff that index.js lazily exports | |
const internalRe = __nccwpck_require__(9523) | |
module.exports = { | |
re: internalRe.re, | |
src: internalRe.src, | |
tokens: internalRe.t, | |
SEMVER_SPEC_VERSION: __nccwpck_require__(2293).SEMVER_SPEC_VERSION, | |
SemVer: __nccwpck_require__(8088), | |
compareIdentifiers: __nccwpck_require__(2463).compareIdentifiers, | |
rcompareIdentifiers: __nccwpck_require__(2463).rcompareIdentifiers, | |
parse: __nccwpck_require__(5925), | |
valid: __nccwpck_require__(9601), | |
clean: __nccwpck_require__(8848), | |
inc: __nccwpck_require__(900), | |
diff: __nccwpck_require__(4297), | |
major: __nccwpck_require__(6688), | |
minor: __nccwpck_require__(8447), | |
patch: __nccwpck_require__(2866), | |
prerelease: __nccwpck_require__(4016), | |
compare: __nccwpck_require__(4309), | |
rcompare: __nccwpck_require__(6417), | |
compareLoose: __nccwpck_require__(2804), | |
compareBuild: __nccwpck_require__(2156), | |
sort: __nccwpck_require__(1426), | |
rsort: __nccwpck_require__(8701), | |
gt: __nccwpck_require__(4123), | |
lt: __nccwpck_require__(194), | |
eq: __nccwpck_require__(1898), | |
neq: __nccwpck_require__(6017), | |
gte: __nccwpck_require__(5522), | |
lte: __nccwpck_require__(7520), | |
cmp: __nccwpck_require__(5098), | |
coerce: __nccwpck_require__(3466), | |
Comparator: __nccwpck_require__(1532), | |
Range: __nccwpck_require__(9828), | |
satisfies: __nccwpck_require__(6055), | |
toComparators: __nccwpck_require__(2706), | |
maxSatisfying: __nccwpck_require__(579), | |
minSatisfying: __nccwpck_require__(832), | |
minVersion: __nccwpck_require__(4179), | |
validRange: __nccwpck_require__(2098), | |
outside: __nccwpck_require__(420), | |
gtr: __nccwpck_require__(9380), | |
ltr: __nccwpck_require__(3323), | |
intersects: __nccwpck_require__(7008), | |
simplifyRange: __nccwpck_require__(5297), | |
subset: __nccwpck_require__(7863), | |
} | |
/***/ }), | |
/***/ 2293: | |
/***/ ((module) => { | |
// Note: this is the semver.org version of the spec that it implements | |
// Not necessarily the package version of this code. | |
const SEMVER_SPEC_VERSION = '2.0.0' | |
const MAX_LENGTH = 256 | |
const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || | |
/* istanbul ignore next */ 9007199254740991 | |
// Max safe segment length for coercion. | |
const MAX_SAFE_COMPONENT_LENGTH = 16 | |
module.exports = { | |
SEMVER_SPEC_VERSION, | |
MAX_LENGTH, | |
MAX_SAFE_INTEGER, | |
MAX_SAFE_COMPONENT_LENGTH | |
} | |
/***/ }), | |
/***/ 427: | |
/***/ ((module) => { | |
const debug = ( | |
typeof process === 'object' && | |
process.env && | |
process.env.NODE_DEBUG && | |
/\bsemver\b/i.test(process.env.NODE_DEBUG) | |
) ? (...args) => console.error('SEMVER', ...args) | |
: () => {} | |
module.exports = debug | |
/***/ }), | |
/***/ 2463: | |
/***/ ((module) => { | |
const numeric = /^[0-9]+$/ | |
const compareIdentifiers = (a, b) => { | |
const anum = numeric.test(a) | |
const bnum = numeric.test(b) | |
if (anum && bnum) { | |
a = +a | |
b = +b | |
} | |
return a === b ? 0 | |
: (anum && !bnum) ? -1 | |
: (bnum && !anum) ? 1 | |
: a < b ? -1 | |
: 1 | |
} | |
const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) | |
module.exports = { | |
compareIdentifiers, | |
rcompareIdentifiers | |
} | |
/***/ }), | |
/***/ 785: | |
/***/ ((module) => { | |
// parse out just the options we care about so we always get a consistent | |
// obj with keys in a consistent order. | |
const opts = ['includePrerelease', 'loose', 'rtl'] | |
const parseOptions = options => | |
!options ? {} | |
: typeof options !== 'object' ? { loose: true } | |
: opts.filter(k => options[k]).reduce((options, k) => { | |
options[k] = true | |
return options | |
}, {}) | |
module.exports = parseOptions | |
/***/ }), | |
/***/ 9523: | |
/***/ ((module, exports, __nccwpck_require__) => { | |
const { MAX_SAFE_COMPONENT_LENGTH } = __nccwpck_require__(2293) | |
const debug = __nccwpck_require__(427) | |
exports = module.exports = {} | |
// The actual regexps go on exports.re | |
const re = exports.re = [] | |
const src = exports.src = [] | |
const t = exports.t = {} | |
let R = 0 | |
const createToken = (name, value, isGlobal) => { | |
const index = R++ | |
debug(index, value) | |
t[name] = index | |
src[index] = value | |
re[index] = new RegExp(value, isGlobal ? 'g' : undefined) | |
} | |
// The following Regular Expressions can be used for tokenizing, | |
// validating, and parsing SemVer version strings. | |
// ## Numeric Identifier | |
// A single `0`, or a non-zero digit followed by zero or more digits. | |
createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') | |
createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+') | |
// ## Non-numeric Identifier | |
// Zero or more digits, followed by a letter or hyphen, and then zero or | |
// more letters, digits, or hyphens. | |
createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*') | |
// ## Main Version | |
// Three dot-separated numeric identifiers. | |
createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + | |
`(${src[t.NUMERICIDENTIFIER]})\\.` + | |
`(${src[t.NUMERICIDENTIFIER]})`) | |
createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + | |
`(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + | |
`(${src[t.NUMERICIDENTIFIERLOOSE]})`) | |
// ## Pre-release Version Identifier | |
// A numeric identifier, or a non-numeric identifier. | |
createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] | |
}|${src[t.NONNUMERICIDENTIFIER]})`) | |
createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] | |
}|${src[t.NONNUMERICIDENTIFIER]})`) | |
// ## Pre-release Version | |
// Hyphen, followed by one or more dot-separated pre-release version | |
// identifiers. | |
createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] | |
}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) | |
createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] | |
}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) | |
// ## Build Metadata Identifier | |
// Any combination of digits, letters, or hyphens. | |
createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+') | |
// ## Build Metadata | |
// Plus sign, followed by one or more period-separated build metadata | |
// identifiers. | |
createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] | |
}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) | |
// ## Full Version String | |
// A main version, followed optionally by a pre-release version and | |
// build metadata. | |
// Note that the only major, minor, patch, and pre-release sections of | |
// the version string are capturing groups. The build metadata is not a | |
// capturing group, because it should not ever be used in version | |
// comparison. | |
createToken('FULLPLAIN', `v?${src[t.MAINVERSION] | |
}${src[t.PRERELEASE]}?${ | |
src[t.BUILD]}?`) | |
createToken('FULL', `^${src[t.FULLPLAIN]}$`) | |
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. | |
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty | |
// common in the npm registry. | |
createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] | |
}${src[t.PRERELEASELOOSE]}?${ | |
src[t.BUILD]}?`) | |
createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) | |
createToken('GTLT', '((?:<|>)?=?)') | |
// Something like "2.*" or "1.2.x". | |
// Note that "x.x" is a valid xRange identifer, meaning "any version" | |
// Only the first item is strictly required. | |
createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) | |
createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) | |
createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + | |
`(?:\\.(${src[t.XRANGEIDENTIFIER]})` + | |
`(?:\\.(${src[t.XRANGEIDENTIFIER]})` + | |
`(?:${src[t.PRERELEASE]})?${ | |
src[t.BUILD]}?` + | |
`)?)?`) | |
createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + | |
`(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + | |
`(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + | |
`(?:${src[t.PRERELEASELOOSE]})?${ | |
src[t.BUILD]}?` + | |
`)?)?`) | |
createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) | |
createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) | |
// Coercion. | |
// Extract anything that could conceivably be a part of a valid semver | |
createToken('COERCE', `${'(^|[^\\d])' + | |
'(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + | |
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + | |
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + | |
`(?:$|[^\\d])`) | |
createToken('COERCERTL', src[t.COERCE], true) | |
// Tilde ranges. | |
// Meaning is "reasonably at or greater than" | |
createToken('LONETILDE', '(?:~>?)') | |
createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) | |
exports.tildeTrimReplace = '$1~' | |
createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) | |
createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) | |
// Caret ranges. | |
// Meaning is "at least and backwards compatible with" | |
createToken('LONECARET', '(?:\\^)') | |
createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) | |
exports.caretTrimReplace = '$1^' | |
createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) | |
createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) | |
// A simple gt/lt/eq thing, or just "" to indicate "any version" | |
createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) | |
createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) | |
// An expression to strip any whitespace between the gtlt and the thing | |
// it modifies, so that `> 1.2.3` ==> `>1.2.3` | |
createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] | |
}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) | |
exports.comparatorTrimReplace = '$1$2$3' | |
// Something like `1.2.3 - 1.2.4` | |
// Note that these all use the loose form, because they'll be | |
// checked against either the strict or loose comparator form | |
// later. | |
createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + | |
`\\s+-\\s+` + | |
`(${src[t.XRANGEPLAIN]})` + | |
`\\s*$`) | |
createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + | |
`\\s+-\\s+` + | |
`(${src[t.XRANGEPLAINLOOSE]})` + | |
`\\s*$`) | |
// Star ranges basically just allow anything at all. | |
createToken('STAR', '(<|>)?=?\\s*\\*') | |
// >=0.0.0 is like a star | |
createToken('GTE0', '^\\s*>=\\s*0\.0\.0\\s*$') | |
createToken('GTE0PRE', '^\\s*>=\\s*0\.0\.0-0\\s*$') | |
/***/ }), | |
/***/ 9380: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
// Determine if version is greater than all the versions possible in the range. | |
const outside = __nccwpck_require__(420) | |
const gtr = (version, range, options) => outside(version, range, '>', options) | |
module.exports = gtr | |
/***/ }), | |
/***/ 7008: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const Range = __nccwpck_require__(9828) | |
const intersects = (r1, r2, options) => { | |
r1 = new Range(r1, options) | |
r2 = new Range(r2, options) | |
return r1.intersects(r2) | |
} | |
module.exports = intersects | |
/***/ }), | |
/***/ 3323: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const outside = __nccwpck_require__(420) | |
// Determine if version is less than all the versions possible in the range | |
const ltr = (version, range, options) => outside(version, range, '<', options) | |
module.exports = ltr | |
/***/ }), | |
/***/ 579: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const Range = __nccwpck_require__(9828) | |
const maxSatisfying = (versions, range, options) => { | |
let max = null | |
let maxSV = null | |
let rangeObj = null | |
try { | |
rangeObj = new Range(range, options) | |
} catch (er) { | |
return null | |
} | |
versions.forEach((v) => { | |
if (rangeObj.test(v)) { | |
// satisfies(v, range, options) | |
if (!max || maxSV.compare(v) === -1) { | |
// compare(max, v, true) | |
max = v | |
maxSV = new SemVer(max, options) | |
} | |
} | |
}) | |
return max | |
} | |
module.exports = maxSatisfying | |
/***/ }), | |
/***/ 832: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const Range = __nccwpck_require__(9828) | |
const minSatisfying = (versions, range, options) => { | |
let min = null | |
let minSV = null | |
let rangeObj = null | |
try { | |
rangeObj = new Range(range, options) | |
} catch (er) { | |
return null | |
} | |
versions.forEach((v) => { | |
if (rangeObj.test(v)) { | |
// satisfies(v, range, options) | |
if (!min || minSV.compare(v) === 1) { | |
// compare(min, v, true) | |
min = v | |
minSV = new SemVer(min, options) | |
} | |
} | |
}) | |
return min | |
} | |
module.exports = minSatisfying | |
/***/ }), | |
/***/ 4179: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const Range = __nccwpck_require__(9828) | |
const gt = __nccwpck_require__(4123) | |
const minVersion = (range, loose) => { | |
range = new Range(range, loose) | |
let minver = new SemVer('0.0.0') | |
if (range.test(minver)) { | |
return minver | |
} | |
minver = new SemVer('0.0.0-0') | |
if (range.test(minver)) { | |
return minver | |
} | |
minver = null | |
for (let i = 0; i < range.set.length; ++i) { | |
const comparators = range.set[i] | |
let setMin = null | |
comparators.forEach((comparator) => { | |
// Clone to avoid manipulating the comparator's semver object. | |
const compver = new SemVer(comparator.semver.version) | |
switch (comparator.operator) { | |
case '>': | |
if (compver.prerelease.length === 0) { | |
compver.patch++ | |
} else { | |
compver.prerelease.push(0) | |
} | |
compver.raw = compver.format() | |
/* fallthrough */ | |
case '': | |
case '>=': | |
if (!setMin || gt(compver, setMin)) { | |
setMin = compver | |
} | |
break | |
case '<': | |
case '<=': | |
/* Ignore maximum versions */ | |
break | |
/* istanbul ignore next */ | |
default: | |
throw new Error(`Unexpected operation: ${comparator.operator}`) | |
} | |
}) | |
if (setMin && (!minver || gt(minver, setMin))) | |
minver = setMin | |
} | |
if (minver && range.test(minver)) { | |
return minver | |
} | |
return null | |
} | |
module.exports = minVersion | |
/***/ }), | |
/***/ 420: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const SemVer = __nccwpck_require__(8088) | |
const Comparator = __nccwpck_require__(1532) | |
const {ANY} = Comparator | |
const Range = __nccwpck_require__(9828) | |
const satisfies = __nccwpck_require__(6055) | |
const gt = __nccwpck_require__(4123) | |
const lt = __nccwpck_require__(194) | |
const lte = __nccwpck_require__(7520) | |
const gte = __nccwpck_require__(5522) | |
const outside = (version, range, hilo, options) => { | |
version = new SemVer(version, options) | |
range = new Range(range, options) | |
let gtfn, ltefn, ltfn, comp, ecomp | |
switch (hilo) { | |
case '>': | |
gtfn = gt | |
ltefn = lte | |
ltfn = lt | |
comp = '>' | |
ecomp = '>=' | |
break | |
case '<': | |
gtfn = lt | |
ltefn = gte | |
ltfn = gt | |
comp = '<' | |
ecomp = '<=' | |
break | |
default: | |
throw new TypeError('Must provide a hilo val of "<" or ">"') | |
} | |
// If it satisfies the range it is not outside | |
if (satisfies(version, range, options)) { | |
return false | |
} | |
// From now on, variable terms are as if we're in "gtr" mode. | |
// but note that everything is flipped for the "ltr" function. | |
for (let i = 0; i < range.set.length; ++i) { | |
const comparators = range.set[i] | |
let high = null | |
let low = null | |
comparators.forEach((comparator) => { | |
if (comparator.semver === ANY) { | |
comparator = new Comparator('>=0.0.0') | |
} | |
high = high || comparator | |
low = low || comparator | |
if (gtfn(comparator.semver, high.semver, options)) { | |
high = comparator | |
} else if (ltfn(comparator.semver, low.semver, options)) { | |
low = comparator | |
} | |
}) | |
// If the edge version comparator has a operator then our version | |
// isn't outside it | |
if (high.operator === comp || high.operator === ecomp) { | |
return false | |
} | |
// If the lowest version comparator has an operator and our version | |
// is less than it then it isn't higher than the range | |
if ((!low.operator || low.operator === comp) && | |
ltefn(version, low.semver)) { | |
return false | |
} else if (low.operator === ecomp && ltfn(version, low.semver)) { | |
return false | |
} | |
} | |
return true | |
} | |
module.exports = outside | |
/***/ }), | |
/***/ 5297: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
// given a set of versions and a range, create a "simplified" range | |
// that includes the same versions that the original range does | |
// If the original range is shorter than the simplified one, return that. | |
const satisfies = __nccwpck_require__(6055) | |
const compare = __nccwpck_require__(4309) | |
module.exports = (versions, range, options) => { | |
const set = [] | |
let min = null | |
let prev = null | |
const v = versions.sort((a, b) => compare(a, b, options)) | |
for (const version of v) { | |
const included = satisfies(version, range, options) | |
if (included) { | |
prev = version | |
if (!min) | |
min = version | |
} else { | |
if (prev) { | |
set.push([min, prev]) | |
} | |
prev = null | |
min = null | |
} | |
} | |
if (min) | |
set.push([min, null]) | |
const ranges = [] | |
for (const [min, max] of set) { | |
if (min === max) | |
ranges.push(min) | |
else if (!max && min === v[0]) | |
ranges.push('*') | |
else if (!max) | |
ranges.push(`>=${min}`) | |
else if (min === v[0]) | |
ranges.push(`<=${max}`) | |
else | |
ranges.push(`${min} - ${max}`) | |
} | |
const simplified = ranges.join(' || ') | |
const original = typeof range.raw === 'string' ? range.raw : String(range) | |
return simplified.length < original.length ? simplified : range | |
} | |
/***/ }), | |
/***/ 7863: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const Range = __nccwpck_require__(9828) | |
const Comparator = __nccwpck_require__(1532) | |
const { ANY } = Comparator | |
const satisfies = __nccwpck_require__(6055) | |
const compare = __nccwpck_require__(4309) | |
// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: | |
// - Every simple range `r1, r2, ...` is a null set, OR | |
// - Every simple range `r1, r2, ...` which is not a null set is a subset of | |
// some `R1, R2, ...` | |
// | |
// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: | |
// - If c is only the ANY comparator | |
// - If C is only the ANY comparator, return true | |
// - Else if in prerelease mode, return false | |
// - else replace c with `[>=0.0.0]` | |
// - If C is only the ANY comparator | |
// - if in prerelease mode, return true | |
// - else replace C with `[>=0.0.0]` | |
// - Let EQ be the set of = comparators in c | |
// - If EQ is more than one, return true (null set) | |
// - Let GT be the highest > or >= comparator in c | |
// - Let LT be the lowest < or <= comparator in c | |
// - If GT and LT, and GT.semver > LT.semver, return true (null set) | |
// - If any C is a = range, and GT or LT are set, return false | |
// - If EQ | |
// - If GT, and EQ does not satisfy GT, return true (null set) | |
// - If LT, and EQ does not satisfy LT, return true (null set) | |
// - If EQ satisfies every C, return true | |
// - Else return false | |
// - If GT | |
// - If GT.semver is lower than any > or >= comp in C, return false | |
// - If GT is >=, and GT.semver does not satisfy every C, return false | |
// - If GT.semver has a prerelease, and not in prerelease mode | |
// - If no C has a prerelease and the GT.semver tuple, return false | |
// - If LT | |
// - If LT.semver is greater than any < or <= comp in C, return false | |
// - If LT is <=, and LT.semver does not satisfy every C, return false | |
// - If GT.semver has a prerelease, and not in prerelease mode | |
// - If no C has a prerelease and the LT.semver tuple, return false | |
// - Else return true | |
const subset = (sub, dom, options = {}) => { | |
if (sub === dom) | |
return true | |
sub = new Range(sub, options) | |
dom = new Range(dom, options) | |
let sawNonNull = false | |
OUTER: for (const simpleSub of sub.set) { | |
for (const simpleDom of dom.set) { | |
const isSub = simpleSubset(simpleSub, simpleDom, options) | |
sawNonNull = sawNonNull || isSub !== null | |
if (isSub) | |
continue OUTER | |
} | |
// the null set is a subset of everything, but null simple ranges in | |
// a complex range should be ignored. so if we saw a non-null range, | |
// then we know this isn't a subset, but if EVERY simple range was null, | |
// then it is a subset. | |
if (sawNonNull) | |
return false | |
} | |
return true | |
} | |
const simpleSubset = (sub, dom, options) => { | |
if (sub === dom) | |
return true | |
if (sub.length === 1 && sub[0].semver === ANY) { | |
if (dom.length === 1 && dom[0].semver === ANY) | |
return true | |
else if (options.includePrerelease) | |
sub = [ new Comparator('>=0.0.0-0') ] | |
else | |
sub = [ new Comparator('>=0.0.0') ] | |
} | |
if (dom.length === 1 && dom[0].semver === ANY) { | |
if (options.includePrerelease) | |
return true | |
else | |
dom = [ new Comparator('>=0.0.0') ] | |
} | |
const eqSet = new Set() | |
let gt, lt | |
for (const c of sub) { | |
if (c.operator === '>' || c.operator === '>=') | |
gt = higherGT(gt, c, options) | |
else if (c.operator === '<' || c.operator === '<=') | |
lt = lowerLT(lt, c, options) | |
else | |
eqSet.add(c.semver) | |
} | |
if (eqSet.size > 1) | |
return null | |
let gtltComp | |
if (gt && lt) { | |
gtltComp = compare(gt.semver, lt.semver, options) | |
if (gtltComp > 0) | |
return null | |
else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) | |
return null | |
} | |
// will iterate one or zero times | |
for (const eq of eqSet) { | |
if (gt && !satisfies(eq, String(gt), options)) | |
return null | |
if (lt && !satisfies(eq, String(lt), options)) | |
return null | |
for (const c of dom) { | |
if (!satisfies(eq, String(c), options)) | |
return false | |
} | |
return true | |
} | |
let higher, lower | |
let hasDomLT, hasDomGT | |
// if the subset has a prerelease, we need a comparator in the superset | |
// with the same tuple and a prerelease, or it's not a subset | |
let needDomLTPre = lt && | |
!options.includePrerelease && | |
lt.semver.prerelease.length ? lt.semver : false | |
let needDomGTPre = gt && | |
!options.includePrerelease && | |
gt.semver.prerelease.length ? gt.semver : false | |
// exception: <1.2.3-0 is the same as <1.2.3 | |
if (needDomLTPre && needDomLTPre.prerelease.length === 1 && | |
lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { | |
needDomLTPre = false | |
} | |
for (const c of dom) { | |
hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' | |
hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' | |
if (gt) { | |
if (needDomGTPre) { | |
if (c.semver.prerelease && c.semver.prerelease.length && | |
c.semver.major === needDomGTPre.major && | |
c.semver.minor === needDomGTPre.minor && | |
c.semver.patch === needDomGTPre.patch) { | |
needDomGTPre = false | |
} | |
} | |
if (c.operator === '>' || c.operator === '>=') { | |
higher = higherGT(gt, c, options) | |
if (higher === c && higher !== gt) | |
return false | |
} else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) | |
return false | |
} | |
if (lt) { | |
if (needDomLTPre) { | |
if (c.semver.prerelease && c.semver.prerelease.length && | |
c.semver.major === needDomLTPre.major && | |
c.semver.minor === needDomLTPre.minor && | |
c.semver.patch === needDomLTPre.patch) { | |
needDomLTPre = false | |
} | |
} | |
if (c.operator === '<' || c.operator === '<=') { | |
lower = lowerLT(lt, c, options) | |
if (lower === c && lower !== lt) | |
return false | |
} else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) | |
return false | |
} | |
if (!c.operator && (lt || gt) && gtltComp !== 0) | |
return false | |
} | |
// if there was a < or >, and nothing in the dom, then must be false | |
// UNLESS it was limited by another range in the other direction. | |
// Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 | |
if (gt && hasDomLT && !lt && gtltComp !== 0) | |
return false | |
if (lt && hasDomGT && !gt && gtltComp !== 0) | |
return false | |
// we needed a prerelease range in a specific tuple, but didn't get one | |
// then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, | |
// because it includes prereleases in the 1.2.3 tuple | |
if (needDomGTPre || needDomLTPre) | |
return false | |
return true | |
} | |
// >=1.2.3 is lower than >1.2.3 | |
const higherGT = (a, b, options) => { | |
if (!a) | |
return b | |
const comp = compare(a.semver, b.semver, options) | |
return comp > 0 ? a | |
: comp < 0 ? b | |
: b.operator === '>' && a.operator === '>=' ? b | |
: a | |
} | |
// <=1.2.3 is higher than <1.2.3 | |
const lowerLT = (a, b, options) => { | |
if (!a) | |
return b | |
const comp = compare(a.semver, b.semver, options) | |
return comp < 0 ? a | |
: comp > 0 ? b | |
: b.operator === '<' && a.operator === '<=' ? b | |
: a | |
} | |
module.exports = subset | |
/***/ }), | |
/***/ 2706: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const Range = __nccwpck_require__(9828) | |
// Mostly just for testing and legacy API reasons | |
const toComparators = (range, options) => | |
new Range(range, options).set | |
.map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) | |
module.exports = toComparators | |
/***/ }), | |
/***/ 2098: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
const Range = __nccwpck_require__(9828) | |
const validRange = (range, options) => { | |
try { | |
// Return '*' instead of '' so that truthiness works. | |
// This will throw if it's invalid anyway | |
return new Range(range, options).range || '*' | |
} catch (er) { | |
return null | |
} | |
} | |
module.exports = validRange | |
/***/ }), | |
/***/ 8517: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
/*! | |
* Tmp | |
* | |
* Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu> | |
* | |
* MIT Licensed | |
*/ | |
/* | |
* Module dependencies. | |
*/ | |
const fs = __nccwpck_require__(5747); | |
const os = __nccwpck_require__(2087); | |
const path = __nccwpck_require__(5622); | |
const crypto = __nccwpck_require__(3373); | |
const _c = { fs: fs.constants, os: os.constants }; | |
const rimraf = __nccwpck_require__(4959); | |
/* | |
* The working inner variables. | |
*/ | |
const | |
// the random characters to choose from | |
RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', | |
TEMPLATE_PATTERN = /XXXXXX/, | |
DEFAULT_TRIES = 3, | |
CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), | |
// constants are off on the windows platform and will not match the actual errno codes | |
IS_WIN32 = os.platform() === 'win32', | |
EBADF = _c.EBADF || _c.os.errno.EBADF, | |
ENOENT = _c.ENOENT || _c.os.errno.ENOENT, | |
DIR_MODE = 0o700 /* 448 */, | |
FILE_MODE = 0o600 /* 384 */, | |
EXIT = 'exit', | |
// this will hold the objects need to be removed on exit | |
_removeObjects = [], | |
// API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback | |
FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), | |
FN_RIMRAF_SYNC = rimraf.sync; | |
let | |
_gracefulCleanup = false; | |
/** | |
* Gets a temporary file name. | |
* | |
* @param {(Options|tmpNameCallback)} options options or callback | |
* @param {?tmpNameCallback} callback the callback function | |
*/ | |
function tmpName(options, callback) { | |
const | |
args = _parseArguments(options, callback), | |
opts = args[0], | |
cb = args[1]; | |
try { | |
_assertAndSanitizeOptions(opts); | |
} catch (err) { | |
return cb(err); | |
} | |
let tries = opts.tries; | |
(function _getUniqueName() { | |
try { | |
const name = _generateTmpName(opts); | |
// check whether the path exists then retry if needed | |
fs.stat(name, function (err) { | |
/* istanbul ignore else */ | |
if (!err) { | |
/* istanbul ignore else */ | |
if (tries-- > 0) return _getUniqueName(); | |
return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); | |
} | |
cb(null, name); | |
}); | |
} catch (err) { | |
cb(err); | |
} | |
}()); | |
} | |
/** | |
* Synchronous version of tmpName. | |
* | |
* @param {Object} options | |
* @returns {string} the generated random name | |
* @throws {Error} if the options are invalid or could not generate a filename | |
*/ | |
function tmpNameSync(options) { | |
const | |
args = _parseArguments(options), | |
opts = args[0]; | |
_assertAndSanitizeOptions(opts); | |
let tries = opts.tries; | |
do { | |
const name = _generateTmpName(opts); | |
try { | |
fs.statSync(name); | |
} catch (e) { | |
return name; | |
} | |
} while (tries-- > 0); | |
throw new Error('Could not get a unique tmp filename, max tries reached'); | |
} | |
/** | |
* Creates and opens a temporary file. | |
* | |
* @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined | |
* @param {?fileCallback} callback | |
*/ | |
function file(options, callback) { | |
const | |
args = _parseArguments(options, callback), | |
opts = args[0], | |
cb = args[1]; | |
// gets a temporary filename | |
tmpName(opts, function _tmpNameCreated(err, name) { | |
/* istanbul ignore else */ | |
if (err) return cb(err); | |
// create and open the file | |
fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { | |
/* istanbu ignore else */ | |
if (err) return cb(err); | |
if (opts.discardDescriptor) { | |
return fs.close(fd, function _discardCallback(possibleErr) { | |
// the chance of getting an error on close here is rather low and might occur in the most edgiest cases only | |
return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); | |
}); | |
} else { | |
// detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care | |
// about the descriptor | |
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; | |
cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); | |
} | |
}); | |
}); | |
} | |
/** | |
* Synchronous version of file. | |
* | |
* @param {Options} options | |
* @returns {FileSyncObject} object consists of name, fd and removeCallback | |
* @throws {Error} if cannot create a file | |
*/ | |
function fileSync(options) { | |
const | |
args = _parseArguments(options), | |
opts = args[0]; | |
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; | |
const name = tmpNameSync(opts); | |
var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); | |
/* istanbul ignore else */ | |
if (opts.discardDescriptor) { | |
fs.closeSync(fd); | |
fd = undefined; | |
} | |
return { | |
name: name, | |
fd: fd, | |
removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) | |
}; | |
} | |
/** | |
* Creates a temporary directory. | |
* | |
* @param {(Options|dirCallback)} options the options or the callback function | |
* @param {?dirCallback} callback | |
*/ | |
function dir(options, callback) { | |
const | |
args = _parseArguments(options, callback), | |
opts = args[0], | |
cb = args[1]; | |
// gets a temporary filename | |
tmpName(opts, function _tmpNameCreated(err, name) { | |
/* istanbul ignore else */ | |
if (err) return cb(err); | |
// create the directory | |
fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { | |
/* istanbul ignore else */ | |
if (err) return cb(err); | |
cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); | |
}); | |
}); | |
} | |
/** | |
* Synchronous version of dir. | |
* | |
* @param {Options} options | |
* @returns {DirSyncObject} object consists of name and removeCallback | |
* @throws {Error} if it cannot create a directory | |
*/ | |
function dirSync(options) { | |
const | |
args = _parseArguments(options), | |
opts = args[0]; | |
const name = tmpNameSync(opts); | |
fs.mkdirSync(name, opts.mode || DIR_MODE); | |
return { | |
name: name, | |
removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) | |
}; | |
} | |
/** | |
* Removes files asynchronously. | |
* | |
* @param {Object} fdPath | |
* @param {Function} next | |
* @private | |
*/ | |
function _removeFileAsync(fdPath, next) { | |
const _handler = function (err) { | |
if (err && !_isENOENT(err)) { | |
// reraise any unanticipated error | |
return next(err); | |
} | |
next(); | |
}; | |
if (0 <= fdPath[0]) | |
fs.close(fdPath[0], function () { | |
fs.unlink(fdPath[1], _handler); | |
}); | |
else fs.unlink(fdPath[1], _handler); | |
} | |
/** | |
* Removes files synchronously. | |
* | |
* @param {Object} fdPath | |
* @private | |
*/ | |
function _removeFileSync(fdPath) { | |
let rethrownException = null; | |
try { | |
if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); | |
} catch (e) { | |
// reraise any unanticipated error | |
if (!_isEBADF(e) && !_isENOENT(e)) throw e; | |
} finally { | |
try { | |
fs.unlinkSync(fdPath[1]); | |
} | |
catch (e) { | |
// reraise any unanticipated error | |
if (!_isENOENT(e)) rethrownException = e; | |
} | |
} | |
if (rethrownException !== null) { | |
throw rethrownException; | |
} | |
} | |
/** | |
* Prepares the callback for removal of the temporary file. | |
* | |
* Returns either a sync callback or a async callback depending on whether | |
* fileSync or file was called, which is expressed by the sync parameter. | |
* | |
* @param {string} name the path of the file | |
* @param {number} fd file descriptor | |
* @param {Object} opts | |
* @param {boolean} sync | |
* @returns {fileCallback | fileCallbackSync} | |
* @private | |
*/ | |
function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { | |
const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); | |
const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); | |
if (!opts.keep) _removeObjects.unshift(removeCallbackSync); | |
return sync ? removeCallbackSync : removeCallback; | |
} | |
/** | |
* Prepares the callback for removal of the temporary directory. | |
* | |
* Returns either a sync callback or a async callback depending on whether | |
* tmpFileSync or tmpFile was called, which is expressed by the sync parameter. | |
* | |
* @param {string} name | |
* @param {Object} opts | |
* @param {boolean} sync | |
* @returns {Function} the callback | |
* @private | |
*/ | |
function _prepareTmpDirRemoveCallback(name, opts, sync) { | |
const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); | |
const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; | |
const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); | |
const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); | |
if (!opts.keep) _removeObjects.unshift(removeCallbackSync); | |
return sync ? removeCallbackSync : removeCallback; | |
} | |
/** | |
* Creates a guarded function wrapping the removeFunction call. | |
* | |
* The cleanup callback is save to be called multiple times. | |
* Subsequent invocations will be ignored. | |
* | |
* @param {Function} removeFunction | |
* @param {string} fileOrDirName | |
* @param {boolean} sync | |
* @param {cleanupCallbackSync?} cleanupCallbackSync | |
* @returns {cleanupCallback | cleanupCallbackSync} | |
* @private | |
*/ | |
function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { | |
let called = false; | |
// if sync is true, the next parameter will be ignored | |
return function _cleanupCallback(next) { | |
/* istanbul ignore else */ | |
if (!called) { | |
// remove cleanupCallback from cache | |
const toRemove = cleanupCallbackSync || _cleanupCallback; | |
const index = _removeObjects.indexOf(toRemove); | |
/* istanbul ignore else */ | |
if (index >= 0) _removeObjects.splice(index, 1); | |
called = true; | |
if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { | |
return removeFunction(fileOrDirName); | |
} else { | |
return removeFunction(fileOrDirName, next || function() {}); | |
} | |
} | |
}; | |
} | |
/** | |
* The garbage collector. | |
* | |
* @private | |
*/ | |
function _garbageCollector() { | |
/* istanbul ignore else */ | |
if (!_gracefulCleanup) return; | |
// the function being called removes itself from _removeObjects, | |
// loop until _removeObjects is empty | |
while (_removeObjects.length) { | |
try { | |
_removeObjects[0](); | |
} catch (e) { | |
// already removed? | |
} | |
} | |
} | |
/** | |
* Random name generator based on crypto. | |
* Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript | |
* | |
* @param {number} howMany | |
* @returns {string} the generated random name | |
* @private | |
*/ | |
function _randomChars(howMany) { | |
let | |
value = [], | |
rnd = null; | |
// make sure that we do not fail because we ran out of entropy | |
try { | |
rnd = crypto.randomBytes(howMany); | |
} catch (e) { | |
rnd = crypto.pseudoRandomBytes(howMany); | |
} | |
for (var i = 0; i < howMany; i++) { | |
value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); | |
} | |
return value.join(''); | |
} | |
/** | |
* Helper which determines whether a string s is blank, that is undefined, or empty or null. | |
* | |
* @private | |
* @param {string} s | |
* @returns {Boolean} true whether the string s is blank, false otherwise | |
*/ | |
function _isBlank(s) { | |
return s === null || _isUndefined(s) || !s.trim(); | |
} | |
/** | |
* Checks whether the `obj` parameter is defined or not. | |
* | |
* @param {Object} obj | |
* @returns {boolean} true if the object is undefined | |
* @private | |
*/ | |
function _isUndefined(obj) { | |
return typeof obj === 'undefined'; | |
} | |
/** | |
* Parses the function arguments. | |
* | |
* This function helps to have optional arguments. | |
* | |
* @param {(Options|null|undefined|Function)} options | |
* @param {?Function} callback | |
* @returns {Array} parsed arguments | |
* @private | |
*/ | |
function _parseArguments(options, callback) { | |
/* istanbul ignore else */ | |
if (typeof options === 'function') { | |
return [{}, options]; | |
} | |
/* istanbul ignore else */ | |
if (_isUndefined(options)) { | |
return [{}, callback]; | |
} | |
// copy options so we do not leak the changes we make internally | |
const actualOptions = {}; | |
for (const key of Object.getOwnPropertyNames(options)) { | |
actualOptions[key] = options[key]; | |
} | |
return [actualOptions, callback]; | |
} | |
/** | |
* Generates a new temporary name. | |
* | |
* @param {Object} opts | |
* @returns {string} the new random name according to opts | |
* @private | |
*/ | |
function _generateTmpName(opts) { | |
const tmpDir = opts.tmpdir; | |
/* istanbul ignore else */ | |
if (!_isUndefined(opts.name)) | |
return path.join(tmpDir, opts.dir, opts.name); | |
/* istanbul ignore else */ | |
if (!_isUndefined(opts.template)) | |
return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); | |
// prefix and postfix | |
const name = [ | |
opts.prefix ? opts.prefix : 'tmp', | |
'-', | |
process.pid, | |
'-', | |
_randomChars(12), | |
opts.postfix ? '-' + opts.postfix : '' | |
].join(''); | |
return path.join(tmpDir, opts.dir, name); | |
} | |
/** | |
* Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing | |
* options. | |
* | |
* @param {Options} options | |
* @private | |
*/ | |
function _assertAndSanitizeOptions(options) { | |
options.tmpdir = _getTmpDir(options); | |
const tmpDir = options.tmpdir; | |
/* istanbul ignore else */ | |
if (!_isUndefined(options.name)) | |
_assertIsRelative(options.name, 'name', tmpDir); | |
/* istanbul ignore else */ | |
if (!_isUndefined(options.dir)) | |
_assertIsRelative(options.dir, 'dir', tmpDir); | |
/* istanbul ignore else */ | |
if (!_isUndefined(options.template)) { | |
_assertIsRelative(options.template, 'template', tmpDir); | |
if (!options.template.match(TEMPLATE_PATTERN)) | |
throw new Error(`Invalid template, found "${options.template}".`); | |
} | |
/* istanbul ignore else */ | |
if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) | |
throw new Error(`Invalid tries, found "${options.tries}".`); | |
// if a name was specified we will try once | |
options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1; | |
options.keep = !!options.keep; | |
options.detachDescriptor = !!options.detachDescriptor; | |
options.discardDescriptor = !!options.discardDescriptor; | |
options.unsafeCleanup = !!options.unsafeCleanup; | |
// sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to | |
options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); | |
options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); | |
// sanitize further if template is relative to options.dir | |
options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); | |
// for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to | |
options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); | |
options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; | |
options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; | |
} | |
/** | |
* Resolve the specified path name in respect to tmpDir. | |
* | |
* The specified name might include relative path components, e.g. ../ | |
* so we need to resolve in order to be sure that is is located inside tmpDir | |
* | |
* @param name | |
* @param tmpDir | |
* @returns {string} | |
* @private | |
*/ | |
function _resolvePath(name, tmpDir) { | |
const sanitizedName = _sanitizeName(name); | |
if (sanitizedName.startsWith(tmpDir)) { | |
return path.resolve(sanitizedName); | |
} else { | |
return path.resolve(path.join(tmpDir, sanitizedName)); | |
} | |
} | |
/** | |
* Sanitize the specified path name by removing all quote characters. | |
* | |
* @param name | |
* @returns {string} | |
* @private | |
*/ | |
function _sanitizeName(name) { | |
if (_isBlank(name)) { | |
return name; | |
} | |
return name.replace(/["']/g, ''); | |
} | |
/** | |
* Asserts whether specified name is relative to the specified tmpDir. | |
* | |
* @param {string} name | |
* @param {string} option | |
* @param {string} tmpDir | |
* @throws {Error} | |
* @private | |
*/ | |
function _assertIsRelative(name, option, tmpDir) { | |
if (option === 'name') { | |
// assert that name is not absolute and does not contain a path | |
if (path.isAbsolute(name)) | |
throw new Error(`${option} option must not contain an absolute path, found "${name}".`); | |
// must not fail on valid .<name> or ..<name> or similar such constructs | |
let basename = path.basename(name); | |
if (basename === '..' || basename === '.' || basename !== name) | |
throw new Error(`${option} option must not contain a path, found "${name}".`); | |
} | |
else { // if (option === 'dir' || option === 'template') { | |
// assert that dir or template are relative to tmpDir | |
if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { | |
throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); | |
} | |
let resolvedPath = _resolvePath(name, tmpDir); | |
if (!resolvedPath.startsWith(tmpDir)) | |
throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); | |
} | |
} | |
/** | |
* Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. | |
* | |
* @private | |
*/ | |
function _isEBADF(error) { | |
return _isExpectedError(error, -EBADF, 'EBADF'); | |
} | |
/** | |
* Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. | |
* | |
* @private | |
*/ | |
function _isENOENT(error) { | |
return _isExpectedError(error, -ENOENT, 'ENOENT'); | |
} | |
/** | |
* Helper to determine whether the expected error code matches the actual code and errno, | |
* which will differ between the supported node versions. | |
* | |
* - Node >= 7.0: | |
* error.code {string} | |
* error.errno {number} any numerical value will be negated | |
* | |
* CAVEAT | |
* | |
* On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT | |
* is no different here. | |
* | |
* @param {SystemError} error | |
* @param {number} errno | |
* @param {string} code | |
* @private | |
*/ | |
function _isExpectedError(error, errno, code) { | |
return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; | |
} | |
/** | |
* Sets the graceful cleanup. | |
* | |
* If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the | |
* temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary | |
* object removals. | |
*/ | |
function setGracefulCleanup() { | |
_gracefulCleanup = true; | |
} | |
/** | |
* Returns the currently configured tmp dir from os.tmpdir(). | |
* | |
* @private | |
* @param {?Options} options | |
* @returns {string} the currently configured tmp dir | |
*/ | |
function _getTmpDir(options) { | |
return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); | |
} | |
// Install process exit listener | |
process.addListener(EXIT, _garbageCollector); | |
/** | |
* Configuration options. | |
* | |
* @typedef {Object} Options | |
* @property {?boolean} keep the temporary object (file or dir) will not be garbage collected | |
* @property {?number} tries the number of tries before give up the name generation | |
* @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files | |
* @property {?string} template the "mkstemp" like filename template | |
* @property {?string} name fixed name relative to tmpdir or the specified dir option | |
* @property {?string} dir tmp directory relative to the root tmp directory in use | |
* @property {?string} prefix prefix for the generated name | |
* @property {?string} postfix postfix for the generated name | |
* @property {?string} tmpdir the root tmp directory which overrides the os tmpdir | |
* @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty | |
* @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection | |
* @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection | |
*/ | |
/** | |
* @typedef {Object} FileSyncObject | |
* @property {string} name the name of the file | |
* @property {string} fd the file descriptor or -1 if the fd has been discarded | |
* @property {fileCallback} removeCallback the callback function to remove the file | |
*/ | |
/** | |
* @typedef {Object} DirSyncObject | |
* @property {string} name the name of the directory | |
* @property {fileCallback} removeCallback the callback function to remove the directory | |
*/ | |
/** | |
* @callback tmpNameCallback | |
* @param {?Error} err the error object if anything goes wrong | |
* @param {string} name the temporary file name | |
*/ | |
/** | |
* @callback fileCallback | |
* @param {?Error} err the error object if anything goes wrong | |
* @param {string} name the temporary file name | |
* @param {number} fd the file descriptor or -1 if the fd had been discarded | |
* @param {cleanupCallback} fn the cleanup callback function | |
*/ | |
/** | |
* @callback fileCallbackSync | |
* @param {?Error} err the error object if anything goes wrong | |
* @param {string} name the temporary file name | |
* @param {number} fd the file descriptor or -1 if the fd had been discarded | |
* @param {cleanupCallbackSync} fn the cleanup callback function | |
*/ | |
/** | |
* @callback dirCallback | |
* @param {?Error} err the error object if anything goes wrong | |
* @param {string} name the temporary file name | |
* @param {cleanupCallback} fn the cleanup callback function | |
*/ | |
/** | |
* @callback dirCallbackSync | |
* @param {?Error} err the error object if anything goes wrong | |
* @param {string} name the temporary file name | |
* @param {cleanupCallbackSync} fn the cleanup callback function | |
*/ | |
/** | |
* Removes the temporary created file or directory. | |
* | |
* @callback cleanupCallback | |
* @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed | |
*/ | |
/** | |
* Removes the temporary created file or directory. | |
* | |
* @callback cleanupCallbackSync | |
*/ | |
/** | |
* Callback function for function composition. | |
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} | |
* | |
* @callback simpleCallback | |
*/ | |
// exporting all the needed methods | |
// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will | |
// allow users to reconfigure the temporary directory | |
Object.defineProperty(module.exports, "tmpdir", ({ | |
enumerable: true, | |
configurable: false, | |
get: function () { | |
return _getTmpDir(); | |
} | |
})); | |
module.exports.dir = dir; | |
module.exports.dirSync = dirSync; | |
module.exports.file = file; | |
module.exports.fileSync = fileSync; | |
module.exports.tmpName = tmpName; | |
module.exports.tmpNameSync = tmpNameSync; | |
module.exports.setGracefulCleanup = setGracefulCleanup; | |
/***/ }), | |
/***/ 4294: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
module.exports = __nccwpck_require__(4219); | |
/***/ }), | |
/***/ 4219: | |
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | |
"use strict"; | |
var net = __nccwpck_require__(1631); | |
var tls = __nccwpck_require__(8818); | |
var http = __nccwpck_require__(8605); | |
var https = __nccwpck_require__(7211); | |
var events = __nccwpck_require__(8614); | |
var assert = __nccwpck_require__(2357); | |
var util = __nccwpck_require__(1669); | |
exports.httpOverHttp = httpOverHttp; | |
exports.httpsOverHttp = httpsOverHttp; | |
exports.httpOverHttps = httpOverHttps; | |
exports.httpsOverHttps = httpsOverHttps; | |
function httpOverHttp(options) { | |
var agent = new TunnelingAgent(options); | |
agent.request = http.request; | |
return agent; | |
} | |
function httpsOverHttp(options) { | |
var agent = new TunnelingAgent(options); | |
agent.request = http.request; | |
agent.createSocket = createSecureSocket; | |
agent.defaultPort = 443; | |
return agent; | |
} | |
function httpOverHttps(options) { | |
var agent = new TunnelingAgent(options); | |
agent.request = https.request; | |
return agent; | |
} | |
function httpsOverHttps(options) { | |
var agent = new TunnelingAgent(options); | |
agent.request = https.request; | |
agent.createSocket = createSecureSocket; | |
agent.defaultPort = 443; | |
return agent; | |
} | |
function TunnelingAgent(options) { | |
var self = this; | |
self.options = options || {}; | |
self.proxyOptions = self.options.proxy || {}; | |
self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; | |
self.requests = []; | |
self.sockets = []; | |
self.on('free', function onFree(socket, host, port, localAddress) { | |
var options = toOptions(host, port, localAddress); | |
for (var i = 0, len = self.requests.length; i < len; ++i) { | |
var pending = self.requests[i]; | |
if (pending.host === options.host && pending.port === options.port) { | |
// Detect the request to connect same origin server, | |
// reuse the connection. | |
self.requests.splice(i, 1); | |
pending.request.onSocket(socket); | |
return; | |
} | |
} | |
socket.destroy(); | |
self.removeSocket(socket); | |
}); | |
} | |
util.inherits(TunnelingAgent, events.EventEmitter); | |
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { | |
var self = this; | |
var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); | |
if (self.sockets.length >= this.maxSockets) { | |
// We are over limit so we'll add it to the queue. | |
self.requests.push(options); | |
return; | |
} | |
// If we are under maxSockets create a new one. | |
self.createSocket(options, function(socket) { | |
socket.on('free', onFree); | |
socket.on('close', onCloseOrRemove); | |
socket.on('agentRemove', onCloseOrRemove); | |
req.onSocket(socket); | |
function onFree() { | |
self.emit('free', socket, options); | |
} | |
function onCloseOrRemove(err) { | |
self.removeSocket(socket); | |
socket.removeListener('free', onFree); | |
socket.removeListener('close', onCloseOrRemove); | |
socket.removeListener('agentRemove', onCloseOrRemove); | |
} | |
}); | |
}; | |
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { | |
var self = this; | |
var placeholder = {}; | |
self.sockets.push(placeholder); | |
var connectOptions = mergeOptions({}, self.proxyOptions, { | |
method: 'CONNECT', | |
path: options.host + ':' + options.port, | |
agent: false, | |
headers: { | |
host: options.host + ':' + options.port | |
} | |
}); | |
if (options.localAddress) { | |
connectOptions.localAddress = options.localAddress; | |
} | |
if (connectOptions.proxyAuth) { | |
connectOptions.headers = connectOptions.headers || {}; | |
connectOptions.headers['Proxy-Authorization'] = 'Basic ' + | |
new Buffer(connectOptions.proxyAuth).toString('base64'); | |
} | |
debug('making CONNECT request'); | |
var connectReq = self.request(connectOptions); | |
connectReq.useChunkedEncodingByDefault = false; // for v0.6 | |
connectReq.once('response', onResponse); // for v0.6 | |
connectReq.once('upgrade', onUpgrade); // for v0.6 | |
connectReq.once('connect', onConnect); // for v0.7 or later | |
connectReq.once('error', onError); | |
connectReq.end(); | |
function onResponse(res) { | |
// Very hacky. This is necessary to avoid http-parser leaks. | |
res.upgrade = true; | |
} | |
function onUpgrade(res, socket, head) { | |
// Hacky. | |
process.nextTick(function() { | |
onConnect(res, socket, head); | |
}); | |
} | |
function onConnect(res, socket, head) { | |
connectReq.removeAllListeners(); | |
socket.removeAllListeners(); | |
if (res.statusCode !== 200) { | |
debug('tunneling socket could not be established, statusCode=%d', | |
res.statusCode); | |
socket.destroy(); | |
var error = new Error('tunneling socket could not be established, ' + | |
'statusCode=' + res.statusCode); | |
error.code = 'ECONNRESET'; | |
options.request.emit('error', error); | |
self.removeSocket(placeholder); | |
return; | |
} | |
if (head.length > 0) { | |
debug('got illegal response body from proxy'); | |
socket.destroy(); | |
var error = new Error('got illegal response body from proxy'); | |
error.code = 'ECONNRESET'; | |
options.request.emit('error', error); | |
self.removeSocket(placeholder); | |
return; | |
} | |
debug('tunneling connection has established'); | |
self.sockets[self.sockets.indexOf(placeholder)] = socket; | |
return cb(socket); | |
} | |
function onError(cause) { | |
connectReq.removeAllListeners(); | |
debug('tunneling socket could not be established, cause=%s\n', | |
cause.message, cause.stack); | |
var error = new Error('tunneling socket could not be established, ' + | |
'cause=' + cause.message); | |
error.code = 'ECONNRESET'; | |
options.request.emit('error', error); | |
self.removeSocket(placeholder); | |
} | |
}; | |
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { | |
var pos = this.sockets.indexOf(socket) | |
if (pos === -1) { | |
return; | |
} | |
this.sockets.splice(pos, 1); | |
var pending = this.requests.shift(); | |
if (pending) { | |
// If we have pending requests and a socket gets closed a new one | |
// needs to be created to take over in the pool for the one that closed. | |
this.createSocket(pending, function(socket) { | |
pending.request.onSocket(socket); | |
}); | |
} | |
}; | |
function createSecureSocket(options, cb) { | |
var self = this; | |
TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { | |
var hostHeader = options.request.getHeader('host'); | |
var tlsOptions = mergeOptions({}, self.options, { | |
socket: socket, | |
servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host | |
}); | |
// 0 is dummy port for v0.6 | |
var secureSocket = tls.connect(0, tlsOptions); | |
self.sockets[self.sockets.indexOf(socket)] = secureSocket; | |
cb(secureSocket); | |
}); | |
} | |
function toOptions(host, port, localAddress) { | |
if (typeof host === 'string') { // since v0.10 | |
return { | |
host: host, | |
port: port, | |
localAddress: localAddress | |
}; | |
} | |
return host; // for v0.11 or later | |
} | |
function mergeOptions(target) { | |
for (var i = 1, len = arguments.length; i < len; ++i) { | |
var overrides = arguments[i]; | |
if (typeof overrides === 'object') { | |
var keys = Object.keys(overrides); | |
for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { | |
var k = keys[j]; | |
if (overrides[k] !== undefined) { | |
target[k] = overrides[k]; | |
} | |
} | |
} | |
} | |
return target; | |
} | |
var debug; | |
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { | |
debug = function() { | |
var args = Array.prototype.slice.call(arguments); | |
if (typeof args[0] === 'string') { | |
args[0] = 'TUNNEL: ' + args[0]; | |
} else { | |
args.unshift('TUNNEL:'); | |
} | |
console.error.apply(console, args); | |
} | |
} else { | |
debug = function() {}; | |
} | |
exports.debug = debug; // for test | |
/***/ }), | |
/***/ 5030: | |
/***/ ((__unused_webpack_module, exports) => { | |
"use strict"; | |
Object.defineProperty(exports, "__esModule", ({ value: true })); | |
function getUserAgent() { | |
if (typeof navigator === "object" && "userAgent" in navigator) { | |
return navigator.userAgent; | |
} | |
if (typeof process === "object" && "version" in process) { | |
return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; | |
} | |
return "<environment undetectable>"; | |
} | |
exports.getUserAgent = getUserAgent; | |
//# sourceMappingURL=index.js.map | |
/***/ }), | |
/***/ 2940: | |
/***/ ((module) => { | |
// Returns a wrapper function that returns a wrapped callback | |
// The wrapper function should do some stuff, and return a | |
// presumably different callback function. | |
// This makes sure that own properties are retained, so that | |
// decorations and such are not lost along the way. | |
module.exports = wrappy | |
function wrappy (fn, cb) { | |
if (fn && cb) return wrappy(fn)(cb) | |
if (typeof fn !== 'function') | |
throw new TypeError('need wrapper function') | |
Object.keys(fn).forEach(function (k) { | |
wrapper[k] = fn[k] | |
}) | |
return wrapper | |
function wrapper() { | |
var args = new Array(arguments.length) | |
for (var i = 0; i < args.length; i++) { | |
args[i] = arguments[i] | |
} | |
var ret = fn.apply(this, args) | |
var cb = args[args.length-1] | |
if (typeof ret === 'function' && ret !== cb) { | |
Object.keys(cb).forEach(function (k) { | |
ret[k] = cb[k] | |
}) | |
} | |
return ret | |
} | |
} | |
/***/ }), | |
/***/ 4091: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = function (Yallist) { | |
Yallist.prototype[Symbol.iterator] = function* () { | |
for (let walker = this.head; walker; walker = walker.next) { | |
yield walker.value | |
} | |
} | |
} | |
/***/ }), | |
/***/ 665: | |
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { | |
"use strict"; | |
module.exports = Yallist | |
Yallist.Node = Node | |
Yallist.create = Yallist | |
function Yallist (list) { | |
var self = this | |
if (!(self instanceof Yallist)) { | |
self = new Yallist() | |
} | |
self.tail = null | |
self.head = null | |
self.length = 0 | |
if (list && typeof list.forEach === 'function') { | |
list.forEach(function (item) { | |
self.push(item) | |
}) | |
} else if (arguments.length > 0) { | |
for (var i = 0, l = arguments.length; i < l; i++) { | |
self.push(arguments[i]) | |
} | |
} | |
return self | |
} | |
Yallist.prototype.removeNode = function (node) { | |
if (node.list !== this) { | |
throw new Error('removing node which does not belong to this list') | |
} | |
var next = node.next | |
var prev = node.prev | |
if (next) { | |
next.prev = prev | |
} | |
if (prev) { | |
prev.next = next | |
} | |
if (node === this.head) { | |
this.head = next | |
} | |
if (node === this.tail) { | |
this.tail = prev | |
} | |
node.list.length-- | |
node.next = null | |
node.prev = null | |
node.list = null | |
return next | |
} | |
Yallist.prototype.unshiftNode = function (node) { | |
if (node === this.head) { | |
return | |
} | |
if (node.list) { | |
node.list.removeNode(node) | |
} | |
var head = this.head | |
node.list = this | |
node.next = head | |
if (head) { | |
head.prev = node | |
} | |
this.head = node | |
if (!this.tail) { | |
this.tail = node | |
} | |
this.length++ | |
} | |
Yallist.prototype.pushNode = function (node) { | |
if (node === this.tail) { | |
return | |
} | |
if (node.list) { | |
node.list.removeNode(node) | |
} | |
var tail = this.tail | |
node.list = this | |
node.prev = tail | |
if (tail) { | |
tail.next = node | |
} | |
this.tail = node | |
if (!this.head) { | |
this.head = node | |
} | |
this.length++ | |
} | |
Yallist.prototype.push = function () { | |
for (var i = 0, l = arguments.length; i < l; i++) { | |
push(this, arguments[i]) | |
} | |
return this.length | |
} | |
Yallist.prototype.unshift = function () { | |
for (var i = 0, l = arguments.length; i < l; i++) { | |
unshift(this, arguments[i]) | |
} | |
return this.length | |
} | |
Yallist.prototype.pop = function () { | |
if (!this.tail) { | |
return undefined | |
} | |
var res = this.tail.value | |
this.tail = this.tail.prev | |
if (this.tail) { | |
this.tail.next = null | |
} else { | |
this.head = null | |
} | |
this.length-- | |
return res | |
} | |
Yallist.prototype.shift = function () { | |
if (!this.head) { | |
return undefined | |
} | |
var res = this.head.value | |
this.head = this.head.next | |
if (this.head) { | |
this.head.prev = null | |
} else { | |
this.tail = null | |
} | |
this.length-- | |
return res | |
} | |
Yallist.prototype.forEach = function (fn, thisp) { | |
thisp = thisp || this | |
for (var walker = this.head, i = 0; walker !== null; i++) { | |
fn.call(thisp, walker.value, i, this) | |
walker = walker.next | |
} | |
} | |
Yallist.prototype.forEachReverse = function (fn, thisp) { | |
thisp = thisp || this | |
for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { | |
fn.call(thisp, walker.value, i, this) | |
walker = walker.prev | |
} | |
} | |
Yallist.prototype.get = function (n) { | |
for (var i = 0, walker = this.head; walker !== null && i < n; i++) { | |
// abort out of the list early if we hit a cycle | |
walker = walker.next | |
} | |
if (i === n && walker !== null) { | |
return walker.value | |
} | |
} | |
Yallist.prototype.getReverse = function (n) { | |
for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { | |
// abort out of the list early if we hit a cycle | |
walker = walker.prev | |
} | |
if (i === n && walker !== null) { | |
return walker.value | |
} | |
} | |
Yallist.prototype.map = function (fn, thisp) { | |
thisp = thisp || this | |
var res = new Yallist() | |
for (var walker = this.head; walker !== null;) { | |
res.push(fn.call(thisp, walker.value, this)) | |
walker = walker.next | |
} | |
return res | |
} | |
Yallist.prototype.mapReverse = function (fn, thisp) { | |
thisp = thisp || this | |
var res = new Yallist() | |
for (var walker = this.tail; walker !== null;) { | |
res.push(fn.call(thisp, walker.value, this)) | |
walker = walker.prev | |
} | |
return res | |
} | |
Yallist.prototype.reduce = function (fn, initial) { | |
var acc | |
var walker = this.head | |
if (arguments.length > 1) { | |
acc = initial | |
} else if (this.head) { | |
walker = this.head.next | |
acc = this.head.value | |
} else { | |
throw new TypeError('Reduce of empty list with no initial value') | |
} | |
for (var i = 0; walker !== null; i++) { | |
acc = fn(acc, walker.value, i) | |
walker = walker.next | |
} | |
return acc | |
} | |
Yallist.prototype.reduceReverse = function (fn, initial) { | |
var acc | |
var walker = this.tail | |
if (arguments.length > 1) { | |
acc = initial | |
} else if (this.tail) { | |
walker = this.tail.prev | |
acc = this.tail.value | |
} else { | |
throw new TypeError('Reduce of empty list with no initial value') | |
} | |
for (var i = this.length - 1; walker !== null; i--) { | |
acc = fn(acc, walker.value, i) | |
walker = walker.prev | |
} | |
return acc | |
} | |
Yallist.prototype.toArray = function () { | |
var arr = new Array(this.length) | |
for (var i = 0, walker = this.head; walker !== null; i++) { | |
arr[i] = walker.value | |
walker = walker.next | |
} | |
return arr | |
} | |
Yallist.prototype.toArrayReverse = function () { | |
var arr = new Array(this.length) | |
for (var i = 0, walker = this.tail; walker !== null; i++) { | |
arr[i] = walker.value | |
walker = walker.prev | |
} | |
return arr | |
} | |
Yallist.prototype.slice = function (from, to) { | |
to = to || this.length | |
if (to < 0) { | |
to += this.length | |
} | |
from = from || 0 | |
if (from < 0) { | |
from += this.length | |
} | |
var ret = new Yallist() | |
if (to < from || to < 0) { | |
return ret | |
} | |
if (from < 0) { | |
from = 0 | |
} | |
if (to > this.length) { | |
to = this.length | |
} | |
for (var i = 0, walker = this.head; walker !== null && i < from; i++) { | |
walker = walker.next | |
} | |
for (; walker !== null && i < to; i++, walker = walker.next) { | |
ret.push(walker.value) | |
} | |
return ret | |
} | |
Yallist.prototype.sliceReverse = function (from, to) { | |
to = to || this.length | |
if (to < 0) { | |
to += this.length | |
} | |
from = from || 0 | |
if (from < 0) { | |
from += this.length | |
} | |
var ret = new Yallist() | |
if (to < from || to < 0) { | |
return ret | |
} | |
if (from < 0) { | |
from = 0 | |
} | |
if (to > this.length) { | |
to = this.length | |
} | |
for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { | |
walker = walker.prev | |
} | |
for (; walker !== null && i > from; i--, walker = walker.prev) { | |
ret.push(walker.value) | |
} | |
return ret | |
} | |
Yallist.prototype.splice = function (start, deleteCount, ...nodes) { | |
if (start > this.length) { | |
start = this.length - 1 | |
} | |
if (start < 0) { | |
start = this.length + start; | |
} | |
for (var i = 0, walker = this.head; walker !== null && i < start; i++) { | |
walker = walker.next | |
} | |
var ret = [] | |
for (var i = 0; walker && i < deleteCount; i++) { | |
ret.push(walker.value) | |
walker = this.removeNode(walker) | |
} | |
if (walker === null) { | |
walker = this.tail | |
} | |
if (walker !== this.head && walker !== this.tail) { | |
walker = walker.prev | |
} | |
for (var i = 0; i < nodes.length; i++) { | |
walker = insert(this, walker, nodes[i]) | |
} | |
return ret; | |
} | |
Yallist.prototype.reverse = function () { | |
var head = this.head | |
var tail = this.tail | |
for (var walker = head; walker !== null; walker = walker.prev) { | |
var p = walker.prev | |
walker.prev = walker.next | |
walker.next = p | |
} | |
this.head = tail | |
this.tail = head | |
return this | |
} | |
function insert (self, node, value) { | |
var inserted = node === self.head ? | |
new Node(value, null, node, self) : | |
new Node(value, node, node.next, self) | |
if (inserted.next === null) { | |
self.tail = inserted | |
} | |
if (inserted.prev === null) { | |
self.head = inserted | |
} | |
self.length++ | |
return inserted | |
} | |
function push (self, item) { | |
self.tail = new Node(item, self.tail, null, self) | |
if (!self.head) { | |
self.head = self.tail | |
} | |
self.length++ | |
} | |
function unshift (self, item) { | |
self.head = new Node(item, null, self.head, self) | |
if (!self.tail) { | |
self.tail = self.head | |
} | |
self.length++ | |
} | |
function Node (value, prev, next, list) { | |
if (!(this instanceof Node)) { | |
return new Node(value, prev, next, list) | |
} | |
this.list = list | |
this.value = value | |
if (prev) { | |
prev.next = this | |
this.prev = prev | |
} else { | |
this.prev = null | |
} | |
if (next) { | |
next.prev = this | |
this.next = next | |
} else { | |
this.next = null | |
} | |
} | |
try { | |
// add if support for Symbol.iterator is present | |
__nccwpck_require__(4091)(Yallist) | |
} catch (er) {} | |
/***/ }), | |
/***/ 2877: | |
/***/ ((module) => { | |
module.exports = eval("require")("encoding"); | |
/***/ }), | |
/***/ 2357: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("assert");; | |
/***/ }), | |
/***/ 3129: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("child_process");; | |
/***/ }), | |
/***/ 3373: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("crypto");; | |
/***/ }), | |
/***/ 8614: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("events");; | |
/***/ }), | |
/***/ 5747: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("fs");; | |
/***/ }), | |
/***/ 8605: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("http");; | |
/***/ }), | |
/***/ 7211: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("https");; | |
/***/ }), | |
/***/ 1631: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("net");; | |
/***/ }), | |
/***/ 2087: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("os");; | |
/***/ }), | |
/***/ 5622: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("path");; | |
/***/ }), | |
/***/ 2413: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("stream");; | |
/***/ }), | |
/***/ 4304: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("string_decoder");; | |
/***/ }), | |
/***/ 8213: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("timers");; | |
/***/ }), | |
/***/ 8818: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("tls");; | |
/***/ }), | |
/***/ 8835: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("url");; | |
/***/ }), | |
/***/ 1669: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("util");; | |
/***/ }), | |
/***/ 8761: | |
/***/ ((module) => { | |
"use strict"; | |
module.exports = require("zlib");; | |
/***/ }) | |
/******/ }); | |
/************************************************************************/ | |
/******/ // The module cache | |
/******/ var __webpack_module_cache__ = {}; | |
/******/ | |
/******/ // The require function | |
/******/ function __nccwpck_require__(moduleId) { | |
/******/ // Check if module is in cache | |
/******/ var cachedModule = __webpack_module_cache__[moduleId]; | |
/******/ if (cachedModule !== undefined) { | |
/******/ return cachedModule.exports; | |
/******/ } | |
/******/ // Create a new module (and put it into the cache) | |
/******/ var module = __webpack_module_cache__[moduleId] = { | |
/******/ // no module.id needed | |
/******/ // no module.loaded needed | |
/******/ exports: {} | |
/******/ }; | |
/******/ | |
/******/ // Execute the module function | |
/******/ var threw = true; | |
/******/ try { | |
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); | |
/******/ threw = false; | |
/******/ } finally { | |
/******/ if(threw) delete __webpack_module_cache__[moduleId]; | |
/******/ } | |
/******/ | |
/******/ // Return the exports of the module | |
/******/ return module.exports; | |
/******/ } | |
/******/ | |
/************************************************************************/ | |
/******/ /* webpack/runtime/compat */ | |
/******/ | |
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";/************************************************************************/ | |
/******/ | |
/******/ // startup | |
/******/ // Load entry module and return exports | |
/******/ // This entry module is referenced by other modules so it can't be inlined | |
/******/ var __webpack_exports__ = __nccwpck_require__(3109); | |
/******/ module.exports = __webpack_exports__; | |
/******/ | |
/******/ })() | |
; |