// checkout/dist/index.js (bundled output; mirror of https://gitea.com/actions/checkout.git)
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
/***/ 7219:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fileExistsSync = exports.existsSync = exports.directoryExistsSync = void 0;
const fs = __importStar(__nccwpck_require__(7147));
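// Synchronous filesystem existence helpers (likely bundled from src/fs-helper.ts):
// ENOENT is treated as "does not exist"; any other stat error is rethrown with context.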
function directoryExistsSync(path, required) {
var _a, _b, _c;
if (!path) {
throw new Error("Arg 'path' must not be empty");
}
let stats;
try {
stats = fs.statSync(path);
}
catch (error) {
if (((_a = error) === null || _a === void 0 ? void 0 : _a.code) === 'ENOENT') {
if (!required) {
return false;
}
throw new Error(`Directory '${path}' does not exist`);
}
throw new Error(`Encountered an error when checking whether path '${path}' exists: ${(_c = (_b = error) === null || _b === void 0 ? void 0 : _b.message) !== null && _c !== void 0 ? _c : error}`);
}
if (stats.isDirectory()) {
return true;
}
else if (!required) {
return false;
}
throw new Error(`Directory '${path}' does not exist`);
}
exports.directoryExistsSync = directoryExistsSync;
function existsSync(path) {
var _a, _b, _c;
if (!path) {
throw new Error("Arg 'path' must not be empty");
}
try {
fs.statSync(path);
}
catch (error) {
if (((_a = error) === null || _a === void 0 ? void 0 : _a.code) === 'ENOENT') {
return false;
}
throw new Error(`Encountered an error when checking whether path '${path}' exists: ${(_c = (_b = error) === null || _b === void 0 ? void 0 : _b.message) !== null && _c !== void 0 ? _c : error}`);
}
return true;
}
exports.existsSync = existsSync;
function fileExistsSync(path) {
var _a, _b, _c;
if (!path) {
throw new Error("Arg 'path' must not be empty");
}
let stats;
try {
stats = fs.statSync(path);
}
catch (error) {
if (((_a = error) === null || _a === void 0 ? void 0 : _a.code) === 'ENOENT') {
return false;
}
throw new Error(`Encountered an error when checking whether path '${path}' exists: ${(_c = (_b = error) === null || _b === void 0 ? void 0 : _b.message) !== null && _c !== void 0 ? _c : error}`);
}
if (!stats.isDirectory()) {
return true;
}
return false;
}
exports.fileExistsSync = fileExistsSync;
/***/ }),
/***/ 2565:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createAuthHelper = void 0;
const assert = __importStar(__nccwpck_require__(9491));
const core = __importStar(__nccwpck_require__(2186));
const exec = __importStar(__nccwpck_require__(1514));
const fs = __importStar(__nccwpck_require__(7147));
const io = __importStar(__nccwpck_require__(7436));
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const regexpHelper = __importStar(__nccwpck_require__(3120));
const stateHelper = __importStar(__nccwpck_require__(8647));
const urlHelper = __importStar(__nccwpck_require__(9437));
const v4_1 = __importDefault(__nccwpck_require__(824));
const IS_WINDOWS = process.platform === 'win32';
const SSH_COMMAND_KEY = 'core.sshCommand';
function createAuthHelper(git, settings) {
return new GitAuthHelper(git, settings);
}
exports.createAuthHelper = createAuthHelper;
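// Usage sketch (illustrative, not part of the bundle; assumes a GitCommandManager
// instance `git` and resolved source `settings`):
//   const authHelper = createAuthHelper(git, settings);
//   await authHelper.configureAuth(); // writes the SSH key and/or token extraheader
//   /* ... fetch and checkout ... */
//   await authHelper.removeAuth();    // removes the key files and config entries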
class GitAuthHelper {
constructor(gitCommandManager, gitSourceSettings) {
this.insteadOfValues = [];
this.sshCommand = '';
this.sshKeyPath = '';
this.sshKnownHostsPath = '';
this.temporaryHomePath = '';
this.git = gitCommandManager;
this.settings = gitSourceSettings || {};
// Token auth header
const serverUrl = urlHelper.getServerUrl(this.settings.githubServerUrl);
this.tokenConfigKey = `http.${serverUrl.origin}/.extraheader`; // "origin" is SCHEME://HOSTNAME[:PORT]
const basicCredential = Buffer.from(`x-access-token:${this.settings.authToken}`, 'utf8').toString('base64');
core.setSecret(basicCredential);
this.tokenPlaceholderConfigValue = `AUTHORIZATION: basic ***`;
this.tokenConfigValue = `AUTHORIZATION: basic ${basicCredential}`;
// Instead of SSH URL
this.insteadOfKey = `url.${serverUrl.origin}/.insteadOf`; // "origin" is SCHEME://HOSTNAME[:PORT]
this.insteadOfValues.push(`git@${serverUrl.hostname}:`);
if (this.settings.workflowOrganizationId) {
this.insteadOfValues.push(`org-${this.settings.workflowOrganizationId}@github.com:`);
}
}
configureAuth() {
return __awaiter(this, void 0, void 0, function* () {
// Remove possible previous values
yield this.removeAuth();
// Configure new values
yield this.configureSsh();
yield this.configureToken();
});
}
configureTempGlobalConfig() {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// Already setup global config
if (((_a = this.temporaryHomePath) === null || _a === void 0 ? void 0 : _a.length) > 0) {
return path.join(this.temporaryHomePath, '.gitconfig');
}
// Create a temp home directory
const runnerTemp = process.env['RUNNER_TEMP'] || '';
assert.ok(runnerTemp, 'RUNNER_TEMP is not defined');
const uniqueId = (0, v4_1.default)();
this.temporaryHomePath = path.join(runnerTemp, uniqueId);
yield fs.promises.mkdir(this.temporaryHomePath, { recursive: true });
// Copy the global git config
const gitConfigPath = path.join(process.env['HOME'] || os.homedir(), '.gitconfig');
const newGitConfigPath = path.join(this.temporaryHomePath, '.gitconfig');
let configExists = false;
try {
yield fs.promises.stat(gitConfigPath);
configExists = true;
}
catch (err) {
if (((_b = err) === null || _b === void 0 ? void 0 : _b.code) !== 'ENOENT') {
throw err;
}
}
if (configExists) {
core.info(`Copying '${gitConfigPath}' to '${newGitConfigPath}'`);
yield io.cp(gitConfigPath, newGitConfigPath);
}
else {
yield fs.promises.writeFile(newGitConfigPath, '');
}
// Override HOME
core.info(`Temporarily overriding HOME='${this.temporaryHomePath}' before making global git config changes`);
this.git.setEnvironmentVariable('HOME', this.temporaryHomePath);
return newGitConfigPath;
});
}
configureGlobalAuth() {
return __awaiter(this, void 0, void 0, function* () {
// 'configureTempGlobalConfig' noops if already set, just returns the path
const newGitConfigPath = yield this.configureTempGlobalConfig();
try {
// Configure the token
yield this.configureToken(newGitConfigPath, true);
// Configure HTTPS instead of SSH
yield this.git.tryConfigUnset(this.insteadOfKey, true);
if (!this.settings.sshKey) {
for (const insteadOfValue of this.insteadOfValues) {
yield this.git.config(this.insteadOfKey, insteadOfValue, true, true);
}
}
}
catch (err) {
// Unset in case somehow written to the real global config
core.info('Encountered an error when attempting to configure token. Attempting unconfigure.');
yield this.git.tryConfigUnset(this.tokenConfigKey, true);
throw err;
}
});
}
configureSubmoduleAuth() {
return __awaiter(this, void 0, void 0, function* () {
// Remove possible previous HTTPS instead of SSH
yield this.removeGitConfig(this.insteadOfKey, true);
if (this.settings.persistCredentials) {
// Configure a placeholder value. This approach avoids the credential being captured
// by process creation audit events, which are commonly logged. For more information,
// refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
const output = yield this.git.submoduleForeach(
// wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
`sh -c "git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url"`, this.settings.nestedSubmodules);
// Replace the placeholder
const configPaths = output.match(/(?<=(^|\n)file:)[^\t]+(?=\tremote\.origin\.url)/g) || [];
for (const configPath of configPaths) {
core.debug(`Replacing token placeholder in '${configPath}'`);
yield this.replaceTokenPlaceholder(configPath);
}
if (this.settings.sshKey) {
// Configure core.sshCommand
yield this.git.submoduleForeach(`git config --local '${SSH_COMMAND_KEY}' '${this.sshCommand}'`, this.settings.nestedSubmodules);
}
else {
// Configure HTTPS instead of SSH
for (const insteadOfValue of this.insteadOfValues) {
yield this.git.submoduleForeach(`git config --local --add '${this.insteadOfKey}' '${insteadOfValue}'`, this.settings.nestedSubmodules);
}
}
}
});
}
removeAuth() {
return __awaiter(this, void 0, void 0, function* () {
yield this.removeSsh();
yield this.removeToken();
});
}
removeGlobalConfig() {
var _a;
return __awaiter(this, void 0, void 0, function* () {
if (((_a = this.temporaryHomePath) === null || _a === void 0 ? void 0 : _a.length) > 0) {
core.debug(`Unsetting HOME override`);
this.git.removeEnvironmentVariable('HOME');
yield io.rmRF(this.temporaryHomePath);
}
});
}
configureSsh() {
var _a;
return __awaiter(this, void 0, void 0, function* () {
if (!this.settings.sshKey) {
return;
}
// Write key
const runnerTemp = process.env['RUNNER_TEMP'] || '';
assert.ok(runnerTemp, 'RUNNER_TEMP is not defined');
const uniqueId = (0, v4_1.default)();
this.sshKeyPath = path.join(runnerTemp, uniqueId);
stateHelper.setSshKeyPath(this.sshKeyPath);
yield fs.promises.mkdir(runnerTemp, { recursive: true });
yield fs.promises.writeFile(this.sshKeyPath, this.settings.sshKey.trim() + '\n', { mode: 0o600 });
// Remove inherited permissions on Windows
if (IS_WINDOWS) {
const icacls = yield io.which('icacls.exe');
yield exec.exec(`"${icacls}" "${this.sshKeyPath}" /grant:r "${process.env['USERDOMAIN']}\\${process.env['USERNAME']}:F"`);
yield exec.exec(`"${icacls}" "${this.sshKeyPath}" /inheritance:r`);
}
// Write known hosts
const userKnownHostsPath = path.join(os.homedir(), '.ssh', 'known_hosts');
let userKnownHosts = '';
try {
userKnownHosts = (yield fs.promises.readFile(userKnownHostsPath)).toString();
}
catch (err) {
if (((_a = err) === null || _a === void 0 ? void 0 : _a.code) !== 'ENOENT') {
throw err;
}
}
let knownHosts = '';
if (userKnownHosts) {
knownHosts += `# Begin from ${userKnownHostsPath}\n${userKnownHosts}\n# End from ${userKnownHostsPath}\n`;
}
if (this.settings.sshKnownHosts) {
knownHosts += `# Begin from input known hosts\n${this.settings.sshKnownHosts}\n# end from input known hosts\n`;
}
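// GitHub's public RSA host key is always appended so strict host key checking
// (enabled below when sshStrict is set) works without user-supplied known hosts.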
knownHosts += `# Begin implicitly added github.com\ngithub.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=\n# End implicitly added github.com\n`;
this.sshKnownHostsPath = path.join(runnerTemp, `${uniqueId}_known_hosts`);
stateHelper.setSshKnownHostsPath(this.sshKnownHostsPath);
yield fs.promises.writeFile(this.sshKnownHostsPath, knownHosts);
// Configure GIT_SSH_COMMAND
const sshPath = yield io.which('ssh', true);
this.sshCommand = `"${sshPath}" -i "$RUNNER_TEMP/${path.basename(this.sshKeyPath)}"`;
if (this.settings.sshStrict) {
this.sshCommand += ' -o StrictHostKeyChecking=yes -o CheckHostIP=no';
}
this.sshCommand += ` -o "UserKnownHostsFile=$RUNNER_TEMP/${path.basename(this.sshKnownHostsPath)}"`;
core.info(`Temporarily overriding GIT_SSH_COMMAND=${this.sshCommand}`);
this.git.setEnvironmentVariable('GIT_SSH_COMMAND', this.sshCommand);
// Configure core.sshCommand
if (this.settings.persistCredentials) {
yield this.git.config(SSH_COMMAND_KEY, this.sshCommand);
}
});
}
configureToken(configPath, globalConfig) {
return __awaiter(this, void 0, void 0, function* () {
// Validate args
assert.ok((configPath && globalConfig) || (!configPath && !globalConfig), 'Unexpected configureToken parameter combinations');
// Default config path
if (!configPath && !globalConfig) {
configPath = path.join(this.git.getWorkingDirectory(), '.git', 'config');
}
// Configure a placeholder value. This approach avoids the credential being captured
// by process creation audit events, which are commonly logged. For more information,
// refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
yield this.git.config(this.tokenConfigKey, this.tokenPlaceholderConfigValue, globalConfig);
// Replace the placeholder
yield this.replaceTokenPlaceholder(configPath || '');
});
}
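// Note the two-step write above: `git config` stores only the placeholder value,
// then replaceTokenPlaceholder() below swaps in the real base64 credential by
// editing the config file directly, keeping the secret off the command line.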
replaceTokenPlaceholder(configPath) {
return __awaiter(this, void 0, void 0, function* () {
assert.ok(configPath, 'configPath is not defined');
let content = (yield fs.promises.readFile(configPath)).toString();
const placeholderIndex = content.indexOf(this.tokenPlaceholderConfigValue);
if (placeholderIndex < 0 ||
placeholderIndex != content.lastIndexOf(this.tokenPlaceholderConfigValue)) {
throw new Error(`Unable to replace auth placeholder in ${configPath}`);
}
assert.ok(this.tokenConfigValue, 'tokenConfigValue is not defined');
content = content.replace(this.tokenPlaceholderConfigValue, this.tokenConfigValue);
yield fs.promises.writeFile(configPath, content);
});
}
removeSsh() {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// SSH key
const keyPath = this.sshKeyPath || stateHelper.SshKeyPath;
if (keyPath) {
try {
yield io.rmRF(keyPath);
}
catch (err) {
core.debug(`${(_b = (_a = err) === null || _a === void 0 ? void 0 : _a.message) !== null && _b !== void 0 ? _b : err}`);
core.warning(`Failed to remove SSH key '${keyPath}'`);
}
}
// SSH known hosts
const knownHostsPath = this.sshKnownHostsPath || stateHelper.SshKnownHostsPath;
if (knownHostsPath) {
try {
yield io.rmRF(knownHostsPath);
}
catch (_c) {
// Intentionally empty
}
}
// SSH command
yield this.removeGitConfig(SSH_COMMAND_KEY);
});
}
removeToken() {
return __awaiter(this, void 0, void 0, function* () {
// HTTP extra header
yield this.removeGitConfig(this.tokenConfigKey);
});
}
removeGitConfig(configKey, submoduleOnly = false) {
return __awaiter(this, void 0, void 0, function* () {
if (!submoduleOnly) {
if ((yield this.git.configExists(configKey)) &&
!(yield this.git.tryConfigUnset(configKey))) {
// Load the config contents
core.warning(`Failed to remove '${configKey}' from the git config`);
}
}
const pattern = regexpHelper.escape(configKey);
yield this.git.submoduleForeach(
// wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
`sh -c "git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :"`, true);
});
}
}
/***/ }),
/***/ 738:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createCommandManager = exports.MinimumGitVersion = void 0;
const core = __importStar(__nccwpck_require__(2186));
const exec = __importStar(__nccwpck_require__(1514));
const fs = __importStar(__nccwpck_require__(7147));
const fshelper = __importStar(__nccwpck_require__(7219));
const io = __importStar(__nccwpck_require__(7436));
const path = __importStar(__nccwpck_require__(1017));
const refHelper = __importStar(__nccwpck_require__(8601));
const regexpHelper = __importStar(__nccwpck_require__(3120));
const retryHelper = __importStar(__nccwpck_require__(2155));
const git_version_1 = __nccwpck_require__(3142);
// Auth header not supported before 2.9
// Wire protocol v2 not supported before 2.18
exports.MinimumGitVersion = new git_version_1.GitVersion('2.18');
function createCommandManager(workingDirectory, lfs, doSparseCheckout) {
return __awaiter(this, void 0, void 0, function* () {
return yield GitCommandManager.createCommandManager(workingDirectory, lfs, doSparseCheckout);
});
}
exports.createCommandManager = createCommandManager;
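// Usage sketch (illustrative only; the path and refspec are placeholders):
//   const git = await createCommandManager('/path/to/workdir', false /*lfs*/, false /*doSparseCheckout*/);
//   await git.init();
//   await git.remoteAdd('origin', 'https://github.com/actions/checkout');
//   await git.fetch(['+refs/heads/*:refs/remotes/origin/*'], {});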
class GitCommandManager {
// Private constructor; use createCommandManager()
constructor() {
this.gitEnv = {
GIT_TERMINAL_PROMPT: '0',
GCM_INTERACTIVE: 'Never' // Disable prompting for git credential manager
};
this.gitPath = '';
this.lfs = false;
this.doSparseCheckout = false;
this.workingDirectory = '';
}
branchDelete(remote, branch) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['branch', '--delete', '--force'];
if (remote) {
args.push('--remote');
}
args.push(branch);
yield this.execGit(args);
});
}
branchExists(remote, pattern) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['branch', '--list'];
if (remote) {
args.push('--remote');
}
args.push(pattern);
const output = yield this.execGit(args);
return !!output.stdout.trim();
});
}
branchList(remote) {
return __awaiter(this, void 0, void 0, function* () {
const result = [];
// Note, this implementation uses "rev-parse --symbolic-full-name" because the output from
// "branch --list" is more difficult when in a detached HEAD state.
// TODO(https://github.com/actions/checkout/issues/786): this implementation uses
// "rev-parse --symbolic-full-name" because there is a bug
// in Git 2.18 that causes "rev-parse --symbolic" to output symbolic full names. When
// 2.18 is no longer supported, we can switch back to --symbolic.
const args = ['rev-parse', '--symbolic-full-name'];
if (remote) {
args.push('--remotes=origin');
}
else {
args.push('--branches');
}
const stderr = [];
const errline = [];
const stdout = [];
const stdline = [];
const listeners = {
stderr: (data) => {
stderr.push(data.toString());
},
errline: (data) => {
errline.push(data.toString());
},
stdout: (data) => {
stdout.push(data.toString());
},
stdline: (data) => {
stdline.push(data.toString());
}
};
// Suppress the output in order to avoid flooding annotations with innocuous errors.
yield this.execGit(args, false, true, listeners);
core.debug(`stderr callback is: ${stderr}`);
core.debug(`errline callback is: ${errline}`);
core.debug(`stdout callback is: ${stdout}`);
core.debug(`stdline callback is: ${stdline}`);
for (let branch of stdline) {
branch = branch.trim();
if (!branch) {
continue;
}
if (branch.startsWith('refs/heads/')) {
branch = branch.substring('refs/heads/'.length);
}
else if (branch.startsWith('refs/remotes/')) {
branch = branch.substring('refs/remotes/'.length);
}
result.push(branch);
}
return result;
});
}
sparseCheckout(sparseCheckout) {
return __awaiter(this, void 0, void 0, function* () {
yield this.execGit(['sparse-checkout', 'set', ...sparseCheckout]);
});
}
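// Non-cone mode falls back to raw sparse-checkout patterns: enable
// core.sparseCheckout, then append the patterns to the info/sparse-checkout
// file that `git rev-parse --git-path` resolves.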
sparseCheckoutNonConeMode(sparseCheckout) {
return __awaiter(this, void 0, void 0, function* () {
yield this.execGit(['config', 'core.sparseCheckout', 'true']);
const output = yield this.execGit([
'rev-parse',
'--git-path',
'info/sparse-checkout'
]);
const sparseCheckoutPath = path.join(this.workingDirectory, output.stdout.trimRight());
yield fs.promises.appendFile(sparseCheckoutPath, `\n${sparseCheckout.join('\n')}\n`);
});
}
checkout(ref, startPoint) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['checkout', '--progress', '--force'];
if (startPoint) {
args.push('-B', ref, startPoint);
}
else {
args.push(ref);
}
yield this.execGit(args);
});
}
checkoutDetach() {
return __awaiter(this, void 0, void 0, function* () {
const args = ['checkout', '--detach'];
yield this.execGit(args);
});
}
config(configKey, configValue, globalConfig, add) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['config', globalConfig ? '--global' : '--local'];
if (add) {
args.push('--add');
}
args.push(...[configKey, configValue]);
yield this.execGit(args);
});
}
configExists(configKey, globalConfig) {
return __awaiter(this, void 0, void 0, function* () {
const pattern = regexpHelper.escape(configKey);
const output = yield this.execGit([
'config',
globalConfig ? '--global' : '--local',
'--name-only',
'--get-regexp',
pattern
], true);
return output.exitCode === 0;
});
}
fetch(refSpec, options) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['-c', 'protocol.version=2', 'fetch'];
if (!refSpec.some(x => x === refHelper.tagsRefSpec) && !options.fetchTags) {
args.push('--no-tags');
}
args.push('--prune', '--no-recurse-submodules');
if (options.showProgress) {
args.push('--progress');
}
if (options.filter) {
args.push(`--filter=${options.filter}`);
}
if (options.fetchDepth && options.fetchDepth > 0) {
args.push(`--depth=${options.fetchDepth}`);
}
else if (fshelper.fileExistsSync(path.join(this.workingDirectory, '.git', 'shallow'))) {
args.push('--unshallow');
}
args.push('origin');
for (const arg of refSpec) {
args.push(arg);
}
const that = this;
yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
yield that.execGit(args);
}));
});
}
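// Resolves the remote's default branch by parsing the "ref: refs/heads/<name> HEAD"
// symref line printed by `git ls-remote --symref <url> HEAD`.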
getDefaultBranch(repositoryUrl) {
return __awaiter(this, void 0, void 0, function* () {
let output;
yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
output = yield this.execGit([
'ls-remote',
'--quiet',
'--exit-code',
'--symref',
repositoryUrl,
'HEAD'
]);
}));
if (output) {
// Satisfy compiler, will always be set
for (let line of output.stdout.trim().split('\n')) {
line = line.trim();
if (line.startsWith('ref:') || line.endsWith('HEAD')) {
return line
.substr('ref:'.length, line.length - 'ref:'.length - 'HEAD'.length)
.trim();
}
}
}
throw new Error('Unexpected output when retrieving default branch');
});
}
getWorkingDirectory() {
return this.workingDirectory;
}
init() {
return __awaiter(this, void 0, void 0, function* () {
yield this.execGit(['init', this.workingDirectory]);
});
}
isDetached() {
return __awaiter(this, void 0, void 0, function* () {
// Note, "branch --show-current" would be simpler but isn't available until Git 2.22
const output = yield this.execGit(['rev-parse', '--symbolic-full-name', '--verify', '--quiet', 'HEAD'], true);
return !output.stdout.trim().startsWith('refs/heads/');
});
}
lfsFetch(ref) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['lfs', 'fetch', 'origin', ref];
const that = this;
yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
yield that.execGit(args);
}));
});
}
lfsInstall() {
return __awaiter(this, void 0, void 0, function* () {
yield this.execGit(['lfs', 'install', '--local']);
});
}
log1(format) {
return __awaiter(this, void 0, void 0, function* () {
const args = format ? ['log', '-1', format] : ['log', '-1'];
const silent = format ? false : true;
const output = yield this.execGit(args, false, silent);
return output.stdout;
});
}
remoteAdd(remoteName, remoteUrl) {
return __awaiter(this, void 0, void 0, function* () {
yield this.execGit(['remote', 'add', remoteName, remoteUrl]);
});
}
removeEnvironmentVariable(name) {
delete this.gitEnv[name];
}
/**
* Resolves a ref to a SHA. For a branch or lightweight tag, the commit SHA is returned.
* For an annotated tag, the tag SHA is returned.
* @param {string} ref For example: 'refs/heads/main' or '/refs/tags/v1'
* @returns {Promise<string>}
*/
revParse(ref) {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['rev-parse', ref]);
return output.stdout.trim();
});
}
setEnvironmentVariable(name, value) {
this.gitEnv[name] = value;
}
shaExists(sha) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['rev-parse', '--verify', '--quiet', `${sha}^{object}`];
const output = yield this.execGit(args, true);
return output.exitCode === 0;
});
}
submoduleForeach(command, recursive) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['submodule', 'foreach'];
if (recursive) {
args.push('--recursive');
}
args.push(command);
const output = yield this.execGit(args);
return output.stdout;
});
}
submoduleSync(recursive) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['submodule', 'sync'];
if (recursive) {
args.push('--recursive');
}
yield this.execGit(args);
});
}
submoduleUpdate(fetchDepth, recursive) {
return __awaiter(this, void 0, void 0, function* () {
const args = ['-c', 'protocol.version=2'];
args.push('submodule', 'update', '--init', '--force');
if (fetchDepth > 0) {
args.push(`--depth=${fetchDepth}`);
}
if (recursive) {
args.push('--recursive');
}
yield this.execGit(args);
});
}
submoduleStatus() {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['submodule', 'status'], true);
core.debug(output.stdout);
return output.exitCode === 0;
});
}
tagExists(pattern) {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['tag', '--list', pattern]);
return !!output.stdout.trim();
});
}
tryClean() {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['clean', '-ffdx'], true);
return output.exitCode === 0;
});
}
tryConfigUnset(configKey, globalConfig) {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit([
'config',
globalConfig ? '--global' : '--local',
'--unset-all',
configKey
], true);
return output.exitCode === 0;
});
}
tryDisableAutomaticGarbageCollection() {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['config', '--local', 'gc.auto', '0'], true);
return output.exitCode === 0;
});
}
tryGetFetchUrl() {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['config', '--local', '--get', 'remote.origin.url'], true);
if (output.exitCode !== 0) {
return '';
}
const stdout = output.stdout.trim();
if (stdout.includes('\n')) {
return '';
}
return stdout;
});
}
tryReset() {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.execGit(['reset', '--hard', 'HEAD'], true);
return output.exitCode === 0;
});
}
static createCommandManager(workingDirectory, lfs, doSparseCheckout) {
return __awaiter(this, void 0, void 0, function* () {
const result = new GitCommandManager();
yield result.initializeCommandManager(workingDirectory, lfs, doSparseCheckout);
return result;
});
}
execGit(args, allowAllExitCodes = false, silent = false, customListeners = {}) {
return __awaiter(this, void 0, void 0, function* () {
fshelper.directoryExistsSync(this.workingDirectory, true);
const result = new GitOutput();
const env = {};
for (const key of Object.keys(process.env)) {
env[key] = process.env[key];
}
for (const key of Object.keys(this.gitEnv)) {
env[key] = this.gitEnv[key];
}
const defaultListener = {
stdout: (data) => {
stdout.push(data.toString());
}
};
const mergedListeners = Object.assign(Object.assign({}, defaultListener), customListeners);
const stdout = [];
const options = {
cwd: this.workingDirectory,
env,
silent,
ignoreReturnCode: allowAllExitCodes,
listeners: mergedListeners
};
result.exitCode = yield exec.exec(`"${this.gitPath}"`, args, options);
result.stdout = stdout.join('');
core.debug(result.exitCode.toString());
core.debug(result.stdout);
return result;
});
}
initializeCommandManager(workingDirectory, lfs, doSparseCheckout) {
return __awaiter(this, void 0, void 0, function* () {
this.workingDirectory = workingDirectory;
// Git-lfs will try to pull down assets if any of the local/user/system setting exist.
// If the user didn't enable `LFS` in their pipeline definition, disable LFS fetch/checkout.
this.lfs = lfs;
if (!this.lfs) {
this.gitEnv['GIT_LFS_SKIP_SMUDGE'] = '1';
}
this.gitPath = yield io.which('git', true);
// Git version
core.debug('Getting git version');
let gitVersion = new git_version_1.GitVersion();
let gitOutput = yield this.execGit(['version']);
let stdout = gitOutput.stdout.trim();
if (!stdout.includes('\n')) {
const match = stdout.match(/\d+\.\d+(\.\d+)?/);
if (match) {
gitVersion = new git_version_1.GitVersion(match[0]);
}
}
if (!gitVersion.isValid()) {
throw new Error('Unable to determine git version');
}
// Minimum git version
if (!gitVersion.checkMinimum(exports.MinimumGitVersion)) {
throw new Error(`Minimum required git version is ${exports.MinimumGitVersion}. Your git ('${this.gitPath}') is ${gitVersion}`);
}
if (this.lfs) {
// Git-lfs version
core.debug('Getting git-lfs version');
let gitLfsVersion = new git_version_1.GitVersion();
const gitLfsPath = yield io.which('git-lfs', true);
gitOutput = yield this.execGit(['lfs', 'version']);
stdout = gitOutput.stdout.trim();
if (!stdout.includes('\n')) {
const match = stdout.match(/\d+\.\d+(\.\d+)?/);
if (match) {
gitLfsVersion = new git_version_1.GitVersion(match[0]);
}
}
if (!gitLfsVersion.isValid()) {
throw new Error('Unable to determine git-lfs version');
}
// Minimum git-lfs version
// Note:
// - Auth header not supported before 2.1
const minimumGitLfsVersion = new git_version_1.GitVersion('2.1');
if (!gitLfsVersion.checkMinimum(minimumGitLfsVersion)) {
throw new Error(`Minimum required git-lfs version is ${minimumGitLfsVersion}. Your git-lfs ('${gitLfsPath}') is ${gitLfsVersion}`);
}
}
this.doSparseCheckout = doSparseCheckout;
if (this.doSparseCheckout) {
// The `git sparse-checkout` command was introduced in Git v2.25.0
const minimumGitSparseCheckoutVersion = new git_version_1.GitVersion('2.25');
if (!gitVersion.checkMinimum(minimumGitSparseCheckoutVersion)) {
throw new Error(`Minimum Git version required for sparse checkout is ${minimumGitSparseCheckoutVersion}. Your git ('${this.gitPath}') is ${gitVersion}`);
}
}
// Set the user agent
const gitHttpUserAgent = `git/${gitVersion} (github-actions-checkout)`;
core.debug(`Set git useragent to: ${gitHttpUserAgent}`);
this.gitEnv['GIT_HTTP_USER_AGENT'] = gitHttpUserAgent;
});
}
}
class GitOutput {
constructor() {
this.stdout = '';
this.exitCode = 0;
}
}
/***/ }),
/***/ 8609:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareExistingDirectory = void 0;
const assert = __importStar(__nccwpck_require__(9491));
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const fsHelper = __importStar(__nccwpck_require__(7219));
const io = __importStar(__nccwpck_require__(7436));
const path = __importStar(__nccwpck_require__(1017));
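// Decides whether an existing checkout directory can be reused. Reuse requires a
// git client, a .git directory whose fetch URL matches the target, healthy
// submodules, and (when `clean` is set) a successful clean/reset; stale
// index.lock/shallow.lock files and previously created refs are removed along the
// way. Otherwise the directory contents are deleted so the checkout starts fresh.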
function prepareExistingDirectory(git, repositoryPath, repositoryUrl, clean, ref) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
assert.ok(repositoryPath, 'Expected repositoryPath to be defined');
assert.ok(repositoryUrl, 'Expected repositoryUrl to be defined');
// Indicates whether to delete the directory contents
let remove = false;
// Check whether using git or REST API
if (!git) {
remove = true;
}
// Fetch URL does not match
else if (!fsHelper.directoryExistsSync(path.join(repositoryPath, '.git')) ||
repositoryUrl !== (yield git.tryGetFetchUrl())) {
remove = true;
}
else {
// Delete any index.lock and shallow.lock left by a previously canceled run or crashed git process
const lockPaths = [
path.join(repositoryPath, '.git', 'index.lock'),
path.join(repositoryPath, '.git', 'shallow.lock')
];
for (const lockPath of lockPaths) {
try {
yield io.rmRF(lockPath);
}
catch (error) {
core.debug(`Unable to delete '${lockPath}'. ${(_b = (_a = error) === null || _a === void 0 ? void 0 : _a.message) !== null && _b !== void 0 ? _b : error}`);
}
}
try {
core.startGroup('Removing previously created refs, to avoid conflicts');
// Checkout detached HEAD
if (!(yield git.isDetached())) {
yield git.checkoutDetach();
}
// Remove all refs/heads/*
let branches = yield git.branchList(false);
for (const branch of branches) {
yield git.branchDelete(false, branch);
}
// Remove any conflicting refs/remotes/origin/*
// Example 1: Consider ref is refs/heads/foo and previously fetched refs/remotes/origin/foo/bar
// Example 2: Consider ref is refs/heads/foo/bar and previously fetched refs/remotes/origin/foo
if (ref) {
ref = ref.startsWith('refs/') ? ref : `refs/heads/${ref}`;
if (ref.startsWith('refs/heads/')) {
const upperName1 = ref.toUpperCase().substr('REFS/HEADS/'.length);
const upperName1Slash = `${upperName1}/`;
branches = yield git.branchList(true);
for (const branch of branches) {
const upperName2 = branch.substr('origin/'.length).toUpperCase();
const upperName2Slash = `${upperName2}/`;
if (upperName1.startsWith(upperName2Slash) ||
upperName2.startsWith(upperName1Slash)) {
yield git.branchDelete(true, branch);
}
}
}
}
core.endGroup();
// Check for submodules and delete any existing files if submodules are present
if (!(yield git.submoduleStatus())) {
remove = true;
core.info('Bad Submodules found, removing existing files');
}
// Clean
if (clean) {
core.startGroup('Cleaning the repository');
if (!(yield git.tryClean())) {
core.debug(`The clean command failed. This might be caused by: 1) path too long, 2) permission issue, or 3) file in use. For further investigation, manually run 'git clean -ffdx' on the directory '${repositoryPath}'.`);
remove = true;
}
else if (!(yield git.tryReset())) {
remove = true;
}
core.endGroup();
if (remove) {
core.warning(`Unable to clean or reset the repository. The repository will be recreated instead.`);
}
}
}
catch (error) {
core.warning(`Unable to prepare the existing repository. The repository will be recreated instead.`);
remove = true;
}
}
if (remove) {
// Delete the contents of the directory. Don't delete the directory itself
// since it might be the current working directory.
core.info(`Deleting the contents of '${repositoryPath}'`);
for (const file of yield fs.promises.readdir(repositoryPath)) {
yield io.rmRF(path.join(repositoryPath, file));
}
}
});
}
exports.prepareExistingDirectory = prepareExistingDirectory;
/***/ }),
/***/ 9210:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.cleanup = exports.getSource = void 0;
const core = __importStar(__nccwpck_require__(2186));
const fsHelper = __importStar(__nccwpck_require__(7219));
const gitAuthHelper = __importStar(__nccwpck_require__(2565));
const gitCommandManager = __importStar(__nccwpck_require__(738));
const gitDirectoryHelper = __importStar(__nccwpck_require__(8609));
const githubApiHelper = __importStar(__nccwpck_require__(138));
const io = __importStar(__nccwpck_require__(7436));
const path = __importStar(__nccwpck_require__(1017));
const refHelper = __importStar(__nccwpck_require__(8601));
const stateHelper = __importStar(__nccwpck_require__(8647));
const urlHelper = __importStar(__nccwpck_require__(9437));
function getSource(settings) {
return __awaiter(this, void 0, void 0, function* () {
// Repository URL
core.info(`Syncing repository: ${settings.repositoryOwner}/${settings.repositoryName}`);
const repositoryUrl = urlHelper.getFetchUrl(settings);
// Remove conflicting file path
if (fsHelper.fileExistsSync(settings.repositoryPath)) {
yield io.rmRF(settings.repositoryPath);
}
// Create directory
let isExisting = true;
if (!fsHelper.directoryExistsSync(settings.repositoryPath)) {
isExisting = false;
yield io.mkdirP(settings.repositoryPath);
}
// Git command manager
core.startGroup('Getting Git version info');
const git = yield getGitCommandManager(settings);
core.endGroup();
let authHelper = null;
try {
if (git) {
authHelper = gitAuthHelper.createAuthHelper(git, settings);
if (settings.setSafeDirectory) {
// Setup the repository path as a safe directory, so if we pass this into a container job with a different user it doesn't fail
// Otherwise all git commands we run in a container fail
yield authHelper.configureTempGlobalConfig();
core.info(`Adding repository directory to the temporary git global config as a safe directory`);
yield git
.config('safe.directory', settings.repositoryPath, true, true)
.catch(error => {
core.info(`Failed to initialize safe directory with error: ${error}`);
});
stateHelper.setSafeDirectory();
}
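                // Assumption about the helper implemented elsewhere in this bundle: the config call above
                // amounts to roughly `git config --global --add safe.directory <repositoryPath>`, written to a
                // temporary global config so container jobs running as a different user can still use the checkout.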
}
// Prepare existing directory, otherwise recreate
if (isExisting) {
yield gitDirectoryHelper.prepareExistingDirectory(git, settings.repositoryPath, repositoryUrl, settings.clean, settings.ref);
}
if (!git) {
// Downloading using REST API
core.info(`The repository will be downloaded using the GitHub REST API`);
core.info(`To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`);
if (settings.submodules) {
throw new Error(`Input 'submodules' not supported when falling back to download using the GitHub REST API. To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH.`);
}
else if (settings.sshKey) {
throw new Error(`Input 'ssh-key' not supported when falling back to download using the GitHub REST API. To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH.`);
}
yield githubApiHelper.downloadRepository(settings.authToken, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.repositoryPath, settings.githubServerUrl);
return;
}
// Save state for POST action
stateHelper.setRepositoryPath(settings.repositoryPath);
// Initialize the repository
if (!fsHelper.directoryExistsSync(path.join(settings.repositoryPath, '.git'))) {
core.startGroup('Initializing the repository');
yield git.init();
yield git.remoteAdd('origin', repositoryUrl);
core.endGroup();
}
// Disable automatic garbage collection
core.startGroup('Disabling automatic garbage collection');
if (!(yield git.tryDisableAutomaticGarbageCollection())) {
core.warning(`Unable to turn off git automatic garbage collection. The git fetch operation may trigger garbage collection and cause a delay.`);
}
core.endGroup();
// If we didn't initialize it above, do it now
if (!authHelper) {
authHelper = gitAuthHelper.createAuthHelper(git, settings);
}
// Configure auth
core.startGroup('Setting up auth');
yield authHelper.configureAuth();
core.endGroup();
// Determine the default branch
if (!settings.ref && !settings.commit) {
core.startGroup('Determining the default branch');
if (settings.sshKey) {
settings.ref = yield git.getDefaultBranch(repositoryUrl);
}
else {
settings.ref = yield githubApiHelper.getDefaultBranch(settings.authToken, settings.repositoryOwner, settings.repositoryName, settings.githubServerUrl);
}
core.endGroup();
}
// LFS install
if (settings.lfs) {
yield git.lfsInstall();
}
// Fetch
core.startGroup('Fetching the repository');
const fetchOptions = {};
if (settings.filter) {
fetchOptions.filter = settings.filter;
}
else if (settings.sparseCheckout) {
fetchOptions.filter = 'blob:none';
}
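            // Note: when a sparse checkout is requested without an explicit `filter` input, the fetches below
            // use a partial-clone filter (roughly `git fetch --filter=blob:none ...`), so blobs outside the
            // sparse cone can be fetched lazily at checkout time instead of up front.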
if (settings.fetchDepth <= 0) {
// Fetch all branches and tags
let refSpec = refHelper.getRefSpecForAllHistory(settings.ref, settings.commit);
yield git.fetch(refSpec, fetchOptions);
// When all history is fetched, the ref we're interested in may have moved to a different
// commit (push or force push). If so, fetch again with a targeted refspec.
if (!(yield refHelper.testRef(git, settings.ref, settings.commit))) {
refSpec = refHelper.getRefSpec(settings.ref, settings.commit);
yield git.fetch(refSpec, fetchOptions);
}
}
else {
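                // Shallow path: limit history to the requested fetch depth, fetch tags only when the
                // `fetch-tags` input is enabled, and use a refspec targeted at the requested ref/commit.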
fetchOptions.fetchDepth = settings.fetchDepth;
fetchOptions.fetchTags = settings.fetchTags;
const refSpec = refHelper.getRefSpec(settings.ref, settings.commit);
yield git.fetch(refSpec, fetchOptions);
}
core.endGroup();
// Checkout info
core.startGroup('Determining the checkout info');
const checkoutInfo = yield refHelper.getCheckoutInfo(git, settings.ref, settings.commit);
core.endGroup();
// LFS fetch
// Explicit lfs-fetch to avoid slow checkout (fetches one lfs object at a time).
// Explicit lfs fetch will fetch lfs objects in parallel.
// For sparse checkouts, let `checkout` fetch the needed objects lazily.
if (settings.lfs && !settings.sparseCheckout) {
core.startGroup('Fetching LFS objects');
yield git.lfsFetch(checkoutInfo.startPoint || checkoutInfo.ref);
core.endGroup();
}
// Sparse checkout
if (settings.sparseCheckout) {
core.startGroup('Setting up sparse checkout');
if (settings.sparseCheckoutConeMode) {
yield git.sparseCheckout(settings.sparseCheckout);
}
else {
yield git.sparseCheckoutNonConeMode(settings.sparseCheckout);
}
core.endGroup();
}
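            // Assumption about the helpers above (implemented elsewhere in this bundle): cone mode maps
            // roughly to `git sparse-checkout set <dirs...>`, while non-cone mode populates the
            // sparse-checkout patterns without cone semantics.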
// Checkout
core.startGroup('Checking out the ref');
yield git.checkout(checkoutInfo.ref, checkoutInfo.startPoint);
core.endGroup();
// Submodules
if (settings.submodules) {
// Temporarily override global config
core.startGroup('Setting up auth for fetching submodules');
yield authHelper.configureGlobalAuth();
core.endGroup();
// Checkout submodules
core.startGroup('Fetching submodules');
yield git.submoduleSync(settings.nestedSubmodules);
yield git.submoduleUpdate(settings.fetchDepth, settings.nestedSubmodules);
yield git.submoduleForeach('git config --local gc.auto 0', settings.nestedSubmodules);
core.endGroup();
// Persist credentials
if (settings.persistCredentials) {
core.startGroup('Persisting credentials for submodules');
yield authHelper.configureSubmoduleAuth();
core.endGroup();
}
}
// Get commit information
const commitInfo = yield git.log1();
// Log commit sha
yield git.log1("--format='%H'");
// Check for incorrect pull request merge commit
yield refHelper.checkCommitInfo(settings.authToken, commitInfo, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.githubServerUrl);
}
finally {
// Remove auth
if (authHelper) {
if (!settings.persistCredentials) {
core.startGroup('Removing auth');
yield authHelper.removeAuth();
core.endGroup();
}
authHelper.removeGlobalConfig();
}
}
});
}
exports.getSource = getSource;
function cleanup(repositoryPath) {
return __awaiter(this, void 0, void 0, function* () {
// Repo exists?
if (!repositoryPath ||
!fsHelper.fileExistsSync(path.join(repositoryPath, '.git', 'config'))) {
return;
}
let git;
try {
git = yield gitCommandManager.createCommandManager(repositoryPath, false, false);
}
catch (_a) {
return;
}
// Remove auth
const authHelper = gitAuthHelper.createAuthHelper(git);
try {
if (stateHelper.PostSetSafeDirectory) {
// Setup the repository path as a safe directory, so if we pass this into a container job with a different user it doesn't fail
// Otherwise all git commands we run in a container fail
yield authHelper.configureTempGlobalConfig();
core.info(`Adding repository directory to the temporary git global config as a safe directory`);
yield git
.config('safe.directory', repositoryPath, true, true)
.catch(error => {
core.info(`Failed to initialize safe directory with error: ${error}`);
});
}
yield authHelper.removeAuth();
}
finally {
yield authHelper.removeGlobalConfig();
}
});
}
exports.cleanup = cleanup;
function getGitCommandManager(settings) {
return __awaiter(this, void 0, void 0, function* () {
core.info(`Working directory is '${settings.repositoryPath}'`);
try {
return yield gitCommandManager.createCommandManager(settings.repositoryPath, settings.lfs, settings.sparseCheckout != null);
}
catch (err) {
// Git is required for LFS
if (settings.lfs) {
throw err;
}
// Otherwise fallback to REST API
return undefined;
}
});
}
/***/ }),
/***/ 3142:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.GitVersion = void 0;
class GitVersion {
/**
* Used for comparing the version of git and git-lfs against the minimum required version
* @param version the version string, e.g. 1.2 or 1.2.3
*/
constructor(version) {
this.major = NaN;
this.minor = NaN;
this.patch = NaN;
if (version) {
const match = version.match(/^(\d+)\.(\d+)(\.(\d+))?$/);
if (match) {
this.major = Number(match[1]);
this.minor = Number(match[2]);
if (match[4]) {
this.patch = Number(match[4]);
}
}
}
}
/**
* Compares the instance against a minimum required version
* @param minimum Minimum version
*/
checkMinimum(minimum) {
if (!minimum.isValid()) {
throw new Error('Arg minimum is not a valid version');
}
// Major is insufficient
if (this.major < minimum.major) {
return false;
}
// Major is equal
if (this.major === minimum.major) {
// Minor is insufficient
if (this.minor < minimum.minor) {
return false;
}
// Minor is equal
if (this.minor === minimum.minor) {
// Patch is insufficient
if (this.patch && this.patch < (minimum.patch || 0)) {
return false;
}
}
}
return true;
}
/**
* Indicates whether the instance was constructed from a valid version string
*/
isValid() {
return !isNaN(this.major);
}
/**
* Returns the version as a string, e.g. 1.2 or 1.2.3
*/
toString() {
let result = '';
if (this.isValid()) {
result = `${this.major}.${this.minor}`;
if (!isNaN(this.patch)) {
result += `.${this.patch}`;
}
}
return result;
}
}
exports.GitVersion = GitVersion;
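// Illustrative usage (not part of the bundle's execution path):
//   const v = new GitVersion('2.25.1'); v.isValid() === true; v.toString() === '2.25.1'
//   new GitVersion('not-a-version').isValid() === false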
/***/ }),
/***/ 138:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getDefaultBranch = exports.downloadRepository = void 0;
const assert = __importStar(__nccwpck_require__(9491));
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const github = __importStar(__nccwpck_require__(5438));
const io = __importStar(__nccwpck_require__(7436));
const path = __importStar(__nccwpck_require__(1017));
const retryHelper = __importStar(__nccwpck_require__(2155));
const toolCache = __importStar(__nccwpck_require__(7784));
const v4_1 = __importDefault(__nccwpck_require__(824));
const url_helper_1 = __nccwpck_require__(9437);
const IS_WINDOWS = process.platform === 'win32';
function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath, baseUrl) {
return __awaiter(this, void 0, void 0, function* () {
// Determine the default branch
if (!ref && !commit) {
core.info('Determining the default branch');
ref = yield getDefaultBranch(authToken, owner, repo, baseUrl);
}
// Download the archive
let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
core.info('Downloading the archive');
return yield downloadArchive(authToken, owner, repo, ref, commit, baseUrl);
}));
// Write archive to disk
core.info('Writing archive to disk');
const uniqueId = (0, v4_1.default)();
const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`);
yield fs.promises.writeFile(archivePath, archiveData);
archiveData = Buffer.from(''); // Free memory
// Extract archive
core.info('Extracting the archive');
const extractPath = path.join(repositoryPath, uniqueId);
yield io.mkdirP(extractPath);
if (IS_WINDOWS) {
yield toolCache.extractZip(archivePath, extractPath);
}
else {
yield toolCache.extractTar(archivePath, extractPath);
}
yield io.rmRF(archivePath);
// Determine the path of the repository content. The archive contains
// a top-level folder and the repository content is inside.
const archiveFileNames = yield fs.promises.readdir(extractPath);
assert.ok(archiveFileNames.length == 1, 'Expected exactly one directory inside archive');
const archiveVersion = archiveFileNames[0]; // The top-level folder name includes the short SHA
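        // Illustrative assumption: for a GitHub-generated archive this folder is typically named
        // something like 'owner-repo-0123abc'.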
core.info(`Resolved version ${archiveVersion}`);
const tempRepositoryPath = path.join(extractPath, archiveVersion);
// Move the files
for (const fileName of yield fs.promises.readdir(tempRepositoryPath)) {
const sourcePath = path.join(tempRepositoryPath, fileName);
const targetPath = path.join(repositoryPath, fileName);
if (IS_WINDOWS) {
yield io.cp(sourcePath, targetPath, { recursive: true }); // Copy on Windows (Windows Defender may have a lock)
}
else {
yield io.mv(sourcePath, targetPath);
}
}
yield io.rmRF(extractPath);
});
}
exports.downloadRepository = downloadRepository;
/**
* Looks up the default branch name
*/
function getDefaultBranch(authToken, owner, repo, baseUrl) {
return __awaiter(this, void 0, void 0, function* () {
return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
var _a;
core.info('Retrieving the default branch name');
const octokit = github.getOctokit(authToken, {
baseUrl: (0, url_helper_1.getServerApiUrl)(baseUrl)
});
let result;
try {
// Get the default branch from the repo info
const response = yield octokit.rest.repos.get({ owner, repo });
result = response.data.default_branch;
assert.ok(result, 'default_branch cannot be empty');
}
catch (err) {
// Handle .wiki repo
if (((_a = err) === null || _a === void 0 ? void 0 : _a.status) === 404 &&
repo.toUpperCase().endsWith('.WIKI')) {
result = 'master';
}
// Otherwise error
else {
throw err;
}
}
// Print the default branch
core.info(`Default branch '${result}'`);
// Prefix with 'refs/heads'
if (!result.startsWith('refs/')) {
result = `refs/heads/${result}`;
}
return result;
}));
});
}
exports.getDefaultBranch = getDefaultBranch;
function downloadArchive(authToken, owner, repo, ref, commit, baseUrl) {
return __awaiter(this, void 0, void 0, function* () {
const octokit = github.getOctokit(authToken, {
baseUrl: (0, url_helper_1.getServerApiUrl)(baseUrl)
});
const download = IS_WINDOWS
? octokit.rest.repos.downloadZipballArchive
: octokit.rest.repos.downloadTarballArchive;
const response = yield download({
owner: owner,
repo: repo,
ref: commit || ref
});
return Buffer.from(response.data); // response.data is ArrayBuffer
});
}
/***/ }),
/***/ 5480:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getInputs = void 0;
const core = __importStar(__nccwpck_require__(2186));
const fsHelper = __importStar(__nccwpck_require__(7219));
const github = __importStar(__nccwpck_require__(5438));
const path = __importStar(__nccwpck_require__(1017));
const workflowContextHelper = __importStar(__nccwpck_require__(9568));
function getInputs() {
return __awaiter(this, void 0, void 0, function* () {
const result = {};
// GitHub workspace
let githubWorkspacePath = process.env['GITHUB_WORKSPACE'];
if (!githubWorkspacePath) {
throw new Error('GITHUB_WORKSPACE not defined');
}
githubWorkspacePath = path.resolve(githubWorkspacePath);
core.debug(`GITHUB_WORKSPACE = '${githubWorkspacePath}'`);
fsHelper.directoryExistsSync(githubWorkspacePath, true);
// Qualified repository
const qualifiedRepository = core.getInput('repository') ||
`${github.context.repo.owner}/${github.context.repo.repo}`;
core.debug(`qualified repository = '${qualifiedRepository}'`);
const splitRepository = qualifiedRepository.split('/');
if (splitRepository.length !== 2 ||
!splitRepository[0] ||
!splitRepository[1]) {
throw new Error(`Invalid repository '${qualifiedRepository}'. Expected format {owner}/{repo}.`);
}
result.repositoryOwner = splitRepository[0];
result.repositoryName = splitRepository[1];
// Repository path
result.repositoryPath = core.getInput('path') || '.';
result.repositoryPath = path.resolve(githubWorkspacePath, result.repositoryPath);
if (!(result.repositoryPath + path.sep).startsWith(githubWorkspacePath + path.sep)) {
throw new Error(`Repository path '${result.repositoryPath}' is not under '${githubWorkspacePath}'`);
}
// Workflow repository?
const isWorkflowRepository = qualifiedRepository.toUpperCase() ===
`${github.context.repo.owner}/${github.context.repo.repo}`.toUpperCase();
// Source branch, source version
result.ref = core.getInput('ref');
if (!result.ref) {
if (isWorkflowRepository) {
result.ref = github.context.ref;
result.commit = github.context.sha;
                // Some events have an unqualified ref. For example when a PR is merged (pull_request closed event),
                // the ref is unqualified like "main" instead of "refs/heads/main".
if (result.commit && result.ref && !result.ref.startsWith('refs/')) {
result.ref = `refs/heads/${result.ref}`;
}
}
}
// SHA?
else if (result.ref.match(/^[0-9a-fA-F]{40}$/)) {
result.commit = result.ref;
result.ref = '';
}
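        // Examples (illustrative): a ref input of 'main' or 'refs/heads/main' is kept as a ref, while a
        // 40-character hex input such as 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3' is treated as a
        // commit SHA and the ref is cleared.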
core.debug(`ref = '${result.ref}'`);
core.debug(`commit = '${result.commit}'`);
// Clean
result.clean = (core.getInput('clean') || 'true').toUpperCase() === 'TRUE';
core.debug(`clean = ${result.clean}`);
// Filter
const filter = core.getInput('filter');
if (filter) {
result.filter = filter;
}
core.debug(`filter = ${result.filter}`);
// Sparse checkout
const sparseCheckout = core.getMultilineInput('sparse-checkout');
if (sparseCheckout.length) {
result.sparseCheckout = sparseCheckout;
core.debug(`sparse checkout = ${result.sparseCheckout}`);
}
result.sparseCheckoutConeMode =
(core.getInput('sparse-checkout-cone-mode') || 'true').toUpperCase() ===
'TRUE';
// Fetch depth
result.fetchDepth = Math.floor(Number(core.getInput('fetch-depth') || '1'));
if (isNaN(result.fetchDepth) || result.fetchDepth < 0) {
result.fetchDepth = 0;
}
core.debug(`fetch depth = ${result.fetchDepth}`);
// Fetch tags
result.fetchTags =
(core.getInput('fetch-tags') || 'false').toUpperCase() === 'TRUE';
core.debug(`fetch tags = ${result.fetchTags}`);
// Show fetch progress
result.showProgress =
(core.getInput('show-progress') || 'true').toUpperCase() === 'TRUE';
core.debug(`show progress = ${result.showProgress}`);
// LFS
result.lfs = (core.getInput('lfs') || 'false').toUpperCase() === 'TRUE';
core.debug(`lfs = ${result.lfs}`);
// Submodules
result.submodules = false;
result.nestedSubmodules = false;
const submodulesString = (core.getInput('submodules') || '').toUpperCase();
if (submodulesString == 'RECURSIVE') {
result.submodules = true;
result.nestedSubmodules = true;
}
else if (submodulesString == 'TRUE') {
result.submodules = true;
}
core.debug(`submodules = ${result.submodules}`);
core.debug(`recursive submodules = ${result.nestedSubmodules}`);
// Auth token
result.authToken = core.getInput('token', { required: true });
// SSH
result.sshKey = core.getInput('ssh-key');
result.sshKnownHosts = core.getInput('ssh-known-hosts');
result.sshStrict =
(core.getInput('ssh-strict') || 'true').toUpperCase() === 'TRUE';
// Persist credentials
result.persistCredentials =
(core.getInput('persist-credentials') || 'false').toUpperCase() === 'TRUE';
// Workflow organization ID
result.workflowOrganizationId = yield workflowContextHelper.getOrganizationId();
// Set safe.directory in git global config.
result.setSafeDirectory =
(core.getInput('set-safe-directory') || 'true').toUpperCase() === 'TRUE';
// Determine the GitHub URL that the repository is being hosted from
result.githubServerUrl = core.getInput('github-server-url');
core.debug(`GitHub Host URL = ${result.githubServerUrl}`);
return result;
});
}
exports.getInputs = getInputs;
/***/ }),
/***/ 3109:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const coreCommand = __importStar(__nccwpck_require__(7351));
const gitSourceProvider = __importStar(__nccwpck_require__(9210));
const inputHelper = __importStar(__nccwpck_require__(5480));
const path = __importStar(__nccwpck_require__(1017));
const stateHelper = __importStar(__nccwpck_require__(8647));
function run() {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
const sourceSettings = yield inputHelper.getInputs();
try {
// Register problem matcher
coreCommand.issueCommand('add-matcher', {}, path.join(__dirname, 'problem-matcher.json'));
// Get sources
yield gitSourceProvider.getSource(sourceSettings);
}
finally {
// Unregister problem matcher
coreCommand.issueCommand('remove-matcher', { owner: 'checkout-git' }, '');
}
}
catch (error) {
core.setFailed(`${(_b = (_a = error) === null || _a === void 0 ? void 0 : _a.message) !== null && _b !== void 0 ? _b : error}`);
}
});
}
function cleanup() {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
yield gitSourceProvider.cleanup(stateHelper.RepositoryPath);
}
catch (error) {
core.warning(`${(_b = (_a = error) === null || _a === void 0 ? void 0 : _a.message) !== null && _b !== void 0 ? _b : error}`);
}
});
}
// Main
if (!stateHelper.IsPost) {
run();
}
// Post
else {
cleanup();
}
/***/ }),
/***/ 8601:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkCommitInfo = exports.testRef = exports.getRefSpec = exports.getRefSpecForAllHistory = exports.getCheckoutInfo = exports.tagsRefSpec = void 0;
const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438));
const url_helper_1 = __nccwpck_require__(9437);
exports.tagsRefSpec = '+refs/tags/*:refs/tags/*';
function getCheckoutInfo(git, ref, commit) {
return __awaiter(this, void 0, void 0, function* () {
if (!git) {
throw new Error('Arg git cannot be empty');
}
if (!ref && !commit) {
throw new Error('Args ref and commit cannot both be empty');
}
const result = {};
const upperRef = (ref || '').toUpperCase();
// SHA only
if (!ref) {
result.ref = commit;
}
// refs/heads/
else if (upperRef.startsWith('REFS/HEADS/')) {
const branch = ref.substring('refs/heads/'.length);
result.ref = branch;
result.startPoint = `refs/remotes/origin/${branch}`;
}
// refs/pull/
else if (upperRef.startsWith('REFS/PULL/')) {
const branch = ref.substring('refs/pull/'.length);
result.ref = `refs/remotes/pull/${branch}`;
}
// refs/tags/
else if (upperRef.startsWith('REFS/')) {
result.ref = ref;
}
// Unqualified ref, check for a matching branch or tag
else {
if (yield git.branchExists(true, `origin/${ref}`)) {
result.ref = ref;
result.startPoint = `refs/remotes/origin/${ref}`;
}
else if (yield git.tagExists(`${ref}`)) {
result.ref = `refs/tags/${ref}`;
}
else {
throw new Error(`A branch or tag with the name '${ref}' could not be found`);
}
}
return result;
});
}
exports.getCheckoutInfo = getCheckoutInfo;
function getRefSpecForAllHistory(ref, commit) {
const result = ['+refs/heads/*:refs/remotes/origin/*', exports.tagsRefSpec];
if (ref && ref.toUpperCase().startsWith('REFS/PULL/')) {
const branch = ref.substring('refs/pull/'.length);
result.push(`+${commit || ref}:refs/remotes/pull/${branch}`);
}
return result;
}
exports.getRefSpecForAllHistory = getRefSpecForAllHistory;
function getRefSpec(ref, commit) {
if (!ref && !commit) {
throw new Error('Args ref and commit cannot both be empty');
}
const upperRef = (ref || '').toUpperCase();
// SHA
if (commit) {
// refs/heads
if (upperRef.startsWith('REFS/HEADS/')) {
const branch = ref.substring('refs/heads/'.length);
return [`+${commit}:refs/remotes/origin/${branch}`];
}
// refs/pull/
else if (upperRef.startsWith('REFS/PULL/')) {
const branch = ref.substring('refs/pull/'.length);
return [`+${commit}:refs/remotes/pull/${branch}`];
}
// refs/tags/
else if (upperRef.startsWith('REFS/TAGS/')) {
return [`+${commit}:${ref}`];
}
// Otherwise no destination ref
else {
return [commit];
}
}
// Unqualified ref, check for a matching branch or tag
else if (!upperRef.startsWith('REFS/')) {
return [
`+refs/heads/${ref}*:refs/remotes/origin/${ref}*`,
`+refs/tags/${ref}*:refs/tags/${ref}*`
];
}
// refs/heads/
else if (upperRef.startsWith('REFS/HEADS/')) {
const branch = ref.substring('refs/heads/'.length);
return [`+${ref}:refs/remotes/origin/${branch}`];
}
// refs/pull/
else if (upperRef.startsWith('REFS/PULL/')) {
const branch = ref.substring('refs/pull/'.length);
return [`+${ref}:refs/remotes/pull/${branch}`];
}
// refs/tags/
else {
return [`+${ref}:${ref}`];
}
}
exports.getRefSpec = getRefSpec;
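// Illustrative refspecs (comment only): when a commit SHA is known it is fetched
// and mapped onto the destination ref; otherwise the ref itself is fetched.
// Hypothetical examples:
//
//   getRefSpec('refs/heads/main', '<sha>')
//     // => ['+<sha>:refs/remotes/origin/main']
//   getRefSpec('my-branch', '')
//     // => ['+refs/heads/my-branch*:refs/remotes/origin/my-branch*',
//     //     '+refs/tags/my-branch*:refs/tags/my-branch*']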
/**
* Tests whether the initial fetch created the ref at the expected commit
*/
function testRef(git, ref, commit) {
return __awaiter(this, void 0, void 0, function* () {
if (!git) {
throw new Error('Arg git cannot be empty');
}
if (!ref && !commit) {
throw new Error('Args ref and commit cannot both be empty');
}
// No SHA? Nothing to test
if (!commit) {
return true;
}
// SHA only?
else if (!ref) {
return yield git.shaExists(commit);
}
const upperRef = ref.toUpperCase();
// refs/heads/
if (upperRef.startsWith('REFS/HEADS/')) {
const branch = ref.substring('refs/heads/'.length);
return ((yield git.branchExists(true, `origin/${branch}`)) &&
commit === (yield git.revParse(`refs/remotes/origin/${branch}`)));
}
// refs/pull/
else if (upperRef.startsWith('REFS/PULL/')) {
// Assume matches because fetched using the commit
return true;
}
// refs/tags/
else if (upperRef.startsWith('REFS/TAGS/')) {
const tagName = ref.substring('refs/tags/'.length);
return ((yield git.tagExists(tagName)) &&
commit === (yield git.revParse(`${ref}^{commit}`)));
}
// Unexpected
else {
core.debug(`Unexpected ref format '${ref}' when testing ref info`);
return true;
}
});
}
exports.testRef = testRef;
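// Note (comment only): `git rev-parse <tag>` resolves an annotated tag to the tag
// object rather than the commit it annotates, so the tag branch above compares
// against `<ref>^{commit}` to dereference down to the underlying commit.
// Equivalent shell sketch (illustrative):
//
//   git rev-parse refs/tags/v1.2.3^{commit}   # commit the annotated tag points at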
function checkCommitInfo(token, commitInfo, repositoryOwner, repositoryName, ref, commit, baseUrl) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
// GHES?
if ((0, url_helper_1.isGhes)(baseUrl)) {
return;
}
// Auth token?
if (!token) {
return;
}
// Public PR synchronize, for workflow repo?
if (fromPayload('repository.private') !== false ||
github.context.eventName !== 'pull_request' ||
fromPayload('action') !== 'synchronize' ||
repositoryOwner !== github.context.repo.owner ||
repositoryName !== github.context.repo.repo ||
ref !== github.context.ref ||
!ref.startsWith('refs/pull/') ||
commit !== github.context.sha) {
return;
}
// Head SHA
const expectedHeadSha = fromPayload('after');
if (!expectedHeadSha) {
core.debug('Unable to determine head sha');
return;
}
// Base SHA
const expectedBaseSha = fromPayload('pull_request.base.sha');
if (!expectedBaseSha) {
core.debug('Unable to determine base sha');
return;
}
// Expected message?
const expectedMessage = `Merge ${expectedHeadSha} into ${expectedBaseSha}`;
if (commitInfo.indexOf(expectedMessage) >= 0) {
return;
}
// Extract details from message
const match = commitInfo.match(/Merge ([0-9a-f]{40}) into ([0-9a-f]{40})/);
if (!match) {
core.debug('Unexpected message format');
return;
}
// Post telemetry
const actualHeadSha = match[1];
if (actualHeadSha !== expectedHeadSha) {
core.debug(`Expected head sha ${expectedHeadSha}; actual head sha ${actualHeadSha}`);
const octokit = github.getOctokit(token, {
baseUrl: (0, url_helper_1.getServerApiUrl)(baseUrl),
userAgent: `actions-checkout-tracepoint/1.0 (code=STALE_MERGE;owner=${repositoryOwner};repo=${repositoryName};pr=${fromPayload('number')};run_id=${process.env['GITHUB_RUN_ID']};expected_head_sha=${expectedHeadSha};actual_head_sha=${actualHeadSha})`
});
yield octokit.rest.repos.get({
owner: repositoryOwner,
repo: repositoryName
});
}
}
catch (err) {
core.debug(`Error when validating commit info: ${(_b = (_a = err) === null || _a === void 0 ? void 0 : _a.stack) !== null && _b !== void 0 ? _b : err}`);
}
});
}
exports.checkCommitInfo = checkCommitInfo;
function fromPayload(path) {
return select(github.context.payload, path);
}
function select(obj, path) {
if (!obj) {
return undefined;
}
const i = path.indexOf('.');
if (i < 0) {
return obj[path];
}
const key = path.substr(0, i);
return select(obj[key], path.substr(i + 1));
}
/***/ }),
/***/ 3120:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.escape = void 0;
function escape(value) {
return value.replace(/[^a-zA-Z0-9_]/g, x => {
return `\\${x}`;
});
}
exports.escape = escape;
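// Illustrative example (comment only): escape() backslash-escapes every character
// outside [a-zA-Z0-9_], e.g. escape('v1.2.3') === 'v1\\.2\\.3' and
// escape('feature/x') === 'feature\\/x'.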
/***/ }),
/***/ 2155:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.execute = exports.RetryHelper = void 0;
const core = __importStar(__nccwpck_require__(2186));
const defaultMaxAttempts = 3;
const defaultMinSeconds = 10;
const defaultMaxSeconds = 20;
class RetryHelper {
constructor(maxAttempts = defaultMaxAttempts, minSeconds = defaultMinSeconds, maxSeconds = defaultMaxSeconds) {
this.maxAttempts = maxAttempts;
this.minSeconds = Math.floor(minSeconds);
this.maxSeconds = Math.floor(maxSeconds);
if (this.minSeconds > this.maxSeconds) {
throw new Error('min seconds should be less than or equal to max seconds');
}
}
execute(action) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
let attempt = 1;
while (attempt < this.maxAttempts) {
// Try
try {
return yield action();
}
catch (err) {
core.info((_a = err) === null || _a === void 0 ? void 0 : _a.message);
}
// Sleep
const seconds = this.getSleepAmount();
core.info(`Waiting ${seconds} seconds before trying again`);
yield this.sleep(seconds);
attempt++;
}
// Last attempt
return yield action();
});
}
getSleepAmount() {
return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
this.minSeconds);
}
sleep(seconds) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => setTimeout(resolve, seconds * 1000));
});
}
}
exports.RetryHelper = RetryHelper;
function execute(action) {
return __awaiter(this, void 0, void 0, function* () {
const retryHelper = new RetryHelper();
return yield retryHelper.execute(action);
});
}
exports.execute = execute;
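// Usage sketch (illustrative, not executed; written with plain async/await):
// wrap a flaky async operation so it is retried up to maxAttempts times with a
// random 10-20 second sleep between failures. fetchFromRemote/doWork are
// hypothetical:
//
//   const output = await execute(async () => fetchFromRemote());
//   // or with custom attempt/sleep bounds:
//   const helper = new RetryHelper(5, 1, 3);
//   const result = await helper.execute(async () => doWork());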
/***/ }),
/***/ 8647:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.setSafeDirectory = exports.setSshKnownHostsPath = exports.setSshKeyPath = exports.setRepositoryPath = exports.SshKnownHostsPath = exports.SshKeyPath = exports.PostSetSafeDirectory = exports.RepositoryPath = exports.IsPost = void 0;
const core = __importStar(__nccwpck_require__(2186));
/**
* Indicates whether the POST action is running
*/
exports.IsPost = !!core.getState('isPost');
/**
* The repository path for the POST action. The value is empty during the MAIN action.
*/
exports.RepositoryPath = core.getState('repositoryPath');
/**
* The set-safe-directory value for the POST action. The value is set if the 'set-safe-directory' input is provided during the MAIN action.
*/
exports.PostSetSafeDirectory = core.getState('setSafeDirectory') === 'true';
/**
* The SSH key path for the POST action. The value is empty during the MAIN action.
*/
exports.SshKeyPath = core.getState('sshKeyPath');
/**
* The SSH known hosts path for the POST action. The value is empty during the MAIN action.
*/
exports.SshKnownHostsPath = core.getState('sshKnownHostsPath');
/**
* Save the repository path so the POST action can retrieve the value.
*/
function setRepositoryPath(repositoryPath) {
core.saveState('repositoryPath', repositoryPath);
}
exports.setRepositoryPath = setRepositoryPath;
/**
* Save the SSH key path so the POST action can retrieve the value.
*/
function setSshKeyPath(sshKeyPath) {
core.saveState('sshKeyPath', sshKeyPath);
}
exports.setSshKeyPath = setSshKeyPath;
/**
* Save the SSH known hosts path so the POST action can retrieve the value.
*/
function setSshKnownHostsPath(sshKnownHostsPath) {
core.saveState('sshKnownHostsPath', sshKnownHostsPath);
}
exports.setSshKnownHostsPath = setSshKnownHostsPath;
/**
* Save the set-safe-directory input so the POST action can retrieve the value.
*/
function setSafeDirectory() {
core.saveState('setSafeDirectory', 'true');
}
exports.setSafeDirectory = setSafeDirectory;
// Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
// This is necessary since we don't have a separate entry point.
if (!exports.IsPost) {
core.saveState('isPost', 'true');
}
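// Illustrative flow (comment only): the MAIN action persists values with
// core.saveState(); the runner then exposes them to the POST action as
// STATE_<name> environment variables, which core.getState() reads back above.
// Hypothetical example:
//
//   setRepositoryPath('/home/runner/work/repo/repo');      // during MAIN
//   // during POST: RepositoryPath === '/home/runner/work/repo/repo'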
/***/ }),
/***/ 9437:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isGhes = exports.getServerApiUrl = exports.getServerUrl = exports.getFetchUrl = void 0;
const assert = __importStar(__nccwpck_require__(9491));
const url_1 = __nccwpck_require__(7310);
function getFetchUrl(settings) {
assert.ok(settings.repositoryOwner, 'settings.repositoryOwner must be defined');
assert.ok(settings.repositoryName, 'settings.repositoryName must be defined');
const serviceUrl = getServerUrl(settings.githubServerUrl);
const encodedOwner = encodeURIComponent(settings.repositoryOwner);
const encodedName = encodeURIComponent(settings.repositoryName);
if (settings.sshKey) {
return `git@${serviceUrl.hostname}:${encodedOwner}/${encodedName}.git`;
}
// "origin" is SCHEME://HOSTNAME[:PORT]
return `${serviceUrl.origin}/${encodedOwner}/${encodedName}`;
}
exports.getFetchUrl = getFetchUrl;
function getServerUrl(url) {
let urlValue = url && url.trim().length > 0
? url
: process.env['GITHUB_SERVER_URL'] || 'https://github.com';
return new url_1.URL(urlValue);
}
exports.getServerUrl = getServerUrl;
function getServerApiUrl(url) {
let apiUrl = 'https://api.github.com';
if (isGhes(url)) {
const serverUrl = getServerUrl(url);
apiUrl = new url_1.URL(`${serverUrl.origin}/api/v3`).toString();
}
return apiUrl;
}
exports.getServerApiUrl = getServerApiUrl;
function isGhes(url) {
const ghUrl = getServerUrl(url);
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
exports.isGhes = isGhes;
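// Illustrative examples (comment only), assuming GITHUB_SERVER_URL is unset or
// points at github.com:
//
//   isGhes()                                   // => false
//   isGhes('https://ghe.example.com')          // => true
//   getServerApiUrl()                          // => 'https://api.github.com'
//   getServerApiUrl('https://ghe.example.com') // => 'https://ghe.example.com/api/v3'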
/***/ }),
/***/ 9568:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOrganizationId = void 0;
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
/**
* Gets the organization ID of the running workflow or undefined if the value cannot be loaded from the GITHUB_EVENT_PATH
*/
function getOrganizationId() {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
const eventPath = process.env.GITHUB_EVENT_PATH;
if (!eventPath) {
core.debug(`GITHUB_EVENT_PATH is not defined`);
return;
}
const content = yield fs.promises.readFile(eventPath, { encoding: 'utf8' });
const event = JSON.parse(content);
const id = (_b = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.owner) === null || _b === void 0 ? void 0 : _b.id;
if (typeof id !== 'number') {
core.debug('Repository owner ID not found within GITHUB event info');
return;
}
return id;
}
catch (err) {
core.debug(`Unable to load organization ID from GITHUB_EVENT_PATH: ${err
.message || err}`);
}
});
}
exports.getOrganizationId = getOrganizationId;
/***/ }),
/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__nccwpck_require__(2037));
const utils_1 = __nccwpck_require__(5278);
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
*/
function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
constructor(command, properties, message) {
if (!command) {
command = 'missing.command';
}
this.command = command;
this.properties = properties;
this.message = message;
}
toString() {
let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' ';
let first = true;
for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key];
if (val) {
if (first) {
first = false;
}
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
}
}
}
}
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
return cmdStr;
}
}
function escapeData(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
}
function escapeProperty(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A')
.replace(/:/g, '%3A')
.replace(/,/g, '%2C');
}
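// Wire-format example (comment only): issueCommand('warning', { file: 'app.js', line: '10' }, 'Oops, it failed')
// writes the following line to stdout:
//
//   ::warning file=app.js,line=10::Oops, it failed
//
// escapeData/escapeProperty encode %, CR, LF (and ':' / ',' for properties) so
// values cannot break the ::name key=value::message framing.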
//# sourceMappingURL=command.js.map
/***/ }),
/***/ 2186:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = __nccwpck_require__(7351);
const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const oidc_utils_1 = __nccwpck_require__(8041);
/**
* The code to exit an action
*/
var ExitCode;
(function (ExitCode) {
/**
* A code indicating that the action was successful
*/
ExitCode[ExitCode["Success"] = 0] = "Success";
/**
* A code indicating that the action was a failure
*/
ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
const convertedVal = utils_1.toCommandValue(val);
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
}
command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
* Registers a secret which will get masked from logs
* @param secret value of the secret
*/
function setSecret(secret) {
command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
* Prepends inputPath to the PATH (for this action and future actions)
* @param inputPath
*/
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueFileCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
}
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
* Gets the value of an input.
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
* Returns an empty string if the value is not defined.
*
* @param name name of the input to get
* @param options optional. See InputOptions.
* @returns string
*/
function getInput(name, options) {
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
if (options && options.required && !val) {
throw new Error(`Input required and not supplied: ${name}`);
}
if (options && options.trimWhitespace === false) {
return val;
}
return val.trim();
}
exports.getInput = getInput;
/**
* Gets the values of a multiline input. Each value is also trimmed.
*
* @param name name of the input to get
* @param options optional. See InputOptions.
* @returns string[]
*
*/
function getMultilineInput(name, options) {
const inputs = getInput(name, options)
.split('\n')
.filter(x => x !== '');
if (options && options.trimWhitespace === false) {
return inputs;
}
return inputs.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
/**
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
* Support boolean input list: `true | True | TRUE | false | False | FALSE` .
* The return value is also in boolean type.
* ref: https://yaml.org/spec/1.2/spec.html#id2804923
*
* @param name name of the input to get
* @param options optional. See InputOptions.
* @returns boolean
*/
function getBooleanInput(name, options) {
const trueValue = ['true', 'True', 'TRUE'];
const falseValue = ['false', 'False', 'FALSE'];
const val = getInput(name, options);
if (trueValue.includes(val))
return true;
if (falseValue.includes(val))
return false;
throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
`Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
}
exports.getBooleanInput = getBooleanInput;
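// Illustrative note (comment only): the runner surfaces step inputs as
// INPUT_<NAME> environment variables, so for a hypothetical input `fetch-depth`
// getInput('fetch-depth') reads process.env['INPUT_FETCH-DEPTH'], and
// getBooleanInput() accepts only true/True/TRUE/false/False/FALSE.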
/**
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
const filePath = process.env['GITHUB_OUTPUT'] || '';
if (filePath) {
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
}
process.stdout.write(os.EOL);
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
exports.setOutput = setOutput;
/**
* Enables or disables the echoing of commands into stdout for the rest of the step.
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
*
*/
function setCommandEcho(enabled) {
command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
* Sets the action status to failed.
* When the action exits it will be with an exit code of 1
* @param message add error issue message
*/
function setFailed(message) {
process.exitCode = ExitCode.Failure;
error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
* Gets whether Actions Step Debug is on or not
*/
function isDebug() {
return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
* Writes debug message to user log
* @param message debug message
*/
function debug(message) {
command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
* Adds an error issue
* @param message error issue message. Errors will be converted to string via toString()
* @param properties optional properties to add to the annotation.
*/
function error(message, properties = {}) {
command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
* Adds a warning issue
* @param message warning issue message. Errors will be converted to string via toString()
* @param properties optional properties to add to the annotation.
*/
function warning(message, properties = {}) {
command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
* Adds a notice issue
* @param message notice issue message. Errors will be converted to string via toString()
* @param properties optional properties to add to the annotation.
*/
function notice(message, properties = {}) {
command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.notice = notice;
/**
* Writes info to log with console.log.
* @param message info message
*/
function info(message) {
process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
* Begin an output group.
*
* Output until the next `groupEnd` will be foldable in this group
*
* @param name The name of the output group
*/
function startGroup(name) {
command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
* End an output group.
*/
function endGroup() {
command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
* Wrap an asynchronous function call in a group.
*
* Returns the same type as the function itself.
*
* @param name The name of the group
* @param fn The function to wrap in the group
*/
function group(name, fn) {
return __awaiter(this, void 0, void 0, function* () {
startGroup(name);
let result;
try {
result = yield fn();
}
finally {
endGroup();
}
return result;
});
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
* Saves state for the current action. The state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
const filePath = process.env['GITHUB_STATE'] || '';
if (filePath) {
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
* Gets the value of a state set by this action's main execution.
*
* @param name name of the state to get
* @returns string
*/
function getState(name) {
return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
function getIDToken(aud) {
return __awaiter(this, void 0, void 0, function* () {
return yield oidc_utils_1.OidcClient.getIDToken(aud);
});
}
exports.getIDToken = getIDToken;
/**
* Summary exports
*/
var summary_1 = __nccwpck_require__(1327);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
* @deprecated use core.summary
*/
var summary_2 = __nccwpck_require__(1327);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
* Path exports
*/
var path_utils_1 = __nccwpck_require__(2981);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
//# sourceMappingURL=core.js.map
/***/ }),
/***/ 717:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(8974);
const utils_1 = __nccwpck_require__(5278);
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
encoding: 'utf8'
});
}
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
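// Illustrative record (comment only): setOutput('sha', 'deadbeef') appends a
// heredoc-style entry to the file named by GITHUB_OUTPUT:
//
//   sha<<ghadelimiter_<uuid-v4>
//   deadbeef
//   ghadelimiter_<uuid-v4>
//
// The random delimiter keeps multi-line values from being parsed as extra keys.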
//# sourceMappingURL=file-command.js.map
/***/ }),
/***/ 8041:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186);
class OidcClient {
static createHttpClient(allowRetry = true, maxRetry = 10) {
const requestOptions = {
allowRetries: allowRetry,
maxRetries: maxRetry
};
return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
}
static getRequestToken() {
const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
if (!token) {
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
}
return token;
}
static getIDTokenUrl() {
const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
if (!runtimeUrl) {
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
}
return runtimeUrl;
}
static getCall(id_token_url) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const httpclient = OidcClient.createHttpClient();
const res = yield httpclient
.getJson(id_token_url)
.catch(error => {
throw new Error(`Failed to get ID Token. \n
Error Code : ${error.statusCode}\n
Error Message: ${error.result.message}`);
});
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
if (!id_token) {
throw new Error('Response json body does not have ID Token field');
}
return id_token;
});
}
static getIDToken(audience) {
return __awaiter(this, void 0, void 0, function* () {
try {
// New ID Token is requested from action service
let id_token_url = OidcClient.getIDTokenUrl();
if (audience) {
const encodedAudience = encodeURIComponent(audience);
id_token_url = `${id_token_url}&audience=${encodedAudience}`;
}
core_1.debug(`ID token url is ${id_token_url}`);
const id_token = yield OidcClient.getCall(id_token_url);
core_1.setSecret(id_token);
return id_token;
}
catch (error) {
throw new Error(`Error message: ${error.message}`);
}
});
}
}
exports.OidcClient = OidcClient;
//# sourceMappingURL=oidc-utils.js.map
/***/ }),
/***/ 2981:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(1017));
/**
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
*
* @param pth. Path to transform.
* @return string Posix path.
*/
function toPosixPath(pth) {
return pth.replace(/[\\]/g, '/');
}
exports.toPosixPath = toPosixPath;
/**
* toWin32Path converts the given path to the win32 form. On Linux, / will be
* replaced with \\.
*
* @param pth. Path to transform.
* @return string Win32 path.
*/
function toWin32Path(pth) {
return pth.replace(/[/]/g, '\\');
}
exports.toWin32Path = toWin32Path;
/**
* toPlatformPath converts the given path to a platform-specific path. It does
* this by replacing instances of / and \ with the platform-specific path
* separator.
*
* @param pth The path to platformize.
* @return string The platform-specific path.
*/
function toPlatformPath(pth) {
return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
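// Illustrative examples (comment only):
//
//   toPosixPath('C:\\Users\\runner')  // => 'C:/Users/runner'
//   toWin32Path('/home/runner')       // => '\\home\\runner'
//   toPlatformPath('pkg/dist\\bin')   // => 'pkg/dist/bin' on Linux/macOS,
//                                     //    'pkg\\dist\\bin' on Windows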
//# sourceMappingURL=path-utils.js.map
/***/ }),
/***/ 1327:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = __nccwpck_require__(2037);
const fs_1 = __nccwpck_require__(7147);
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
class Summary {
constructor() {
this._buffer = '';
}
/**
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
* Also checks r/w permissions.
*
* @returns step summary file path
*/
filePath() {
return __awaiter(this, void 0, void 0, function* () {
if (this._filePath) {
return this._filePath;
}
const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
if (!pathFromEnv) {
throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
}
catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
this._filePath = pathFromEnv;
return this._filePath;
});
}
/**
* Wraps content in an HTML tag, adding any HTML attributes
*
* @param {string} tag HTML tag to wrap
* @param {string | null} content content within the tag
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
*
* @returns {string} content wrapped in HTML element
*/
wrap(tag, content, attrs = {}) {
const htmlAttrs = Object.entries(attrs)
.map(([key, value]) => ` ${key}="${value}"`)
.join('');
if (!content) {
return `<${tag}${htmlAttrs}>`;
}
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
}
/**
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
*
* @param {SummaryWriteOptions} [options] (optional) options for write operation
*
* @returns {Promise<Summary>} summary instance
*/
write(options) {
return __awaiter(this, void 0, void 0, function* () {
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
const filePath = yield this.filePath();
const writeFunc = overwrite ? writeFile : appendFile;
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
return this.emptyBuffer();
});
}
/**
* Clears the summary buffer and wipes the summary file
*
* @returns {Summary} summary instance
*/
clear() {
return __awaiter(this, void 0, void 0, function* () {
return this.emptyBuffer().write({ overwrite: true });
});
}
/**
* Returns the current summary buffer as a string
*
* @returns {string} string of summary buffer
*/
stringify() {
return this._buffer;
}
/**
* If the summary buffer is empty
*
* @returns {boolean} true if the buffer is empty
*/
isEmptyBuffer() {
return this._buffer.length === 0;
}
/**
* Resets the summary buffer without writing to summary file
*
* @returns {Summary} summary instance
*/
emptyBuffer() {
this._buffer = '';
return this;
}
/**
* Adds raw text to the summary buffer
*
* @param {string} text content to add
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
*
* @returns {Summary} summary instance
*/
addRaw(text, addEOL = false) {
this._buffer += text;
return addEOL ? this.addEOL() : this;
}
/**
* Adds the operating system-specific end-of-line marker to the buffer
*
* @returns {Summary} summary instance
*/
addEOL() {
return this.addRaw(os_1.EOL);
}
/**
* Adds an HTML codeblock to the summary buffer
*
* @param {string} code content to render within fenced code block
* @param {string} lang (optional) language to syntax highlight code
*
* @returns {Summary} summary instance
*/
addCodeBlock(code, lang) {
const attrs = Object.assign({}, (lang && { lang }));
const element = this.wrap('pre', this.wrap('code', code), attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML list to the summary buffer
*
* @param {string[]} items list of items to render
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
*
* @returns {Summary} summary instance
*/
addList(items, ordered = false) {
const tag = ordered ? 'ol' : 'ul';
const listItems = items.map(item => this.wrap('li', item)).join('');
const element = this.wrap(tag, listItems);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML table to the summary buffer
*
* @param {SummaryTableCell[]} rows table rows
*
* @returns {Summary} summary instance
*/
addTable(rows) {
const tableBody = rows
.map(row => {
const cells = row
.map(cell => {
if (typeof cell === 'string') {
return this.wrap('td', cell);
}
const { header, data, colspan, rowspan } = cell;
const tag = header ? 'th' : 'td';
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
return this.wrap(tag, data, attrs);
})
.join('');
return this.wrap('tr', cells);
})
.join('');
const element = this.wrap('table', tableBody);
return this.addRaw(element).addEOL();
}
/**
* Adds a collapsible HTML details element to the summary buffer
*
* @param {string} label text for the closed state
* @param {string} content collapsible content
*
* @returns {Summary} summary instance
*/
addDetails(label, content) {
const element = this.wrap('details', this.wrap('summary', label) + content);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML image tag to the summary buffer
*
* @param {string} src path to the image you want to embed
* @param {string} alt text description of the image
* @param {SummaryImageOptions} options (optional) additional image attributes
*
* @returns {Summary} summary instance
*/
addImage(src, alt, options) {
const { width, height } = options || {};
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML section heading element
*
* @param {string} text heading text
* @param {number | string} [level=1] (optional) the heading level, default: 1
*
* @returns {Summary} summary instance
*/
addHeading(text, level) {
const tag = `h${level}`;
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
? tag
: 'h1';
const element = this.wrap(allowedTag, text);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML thematic break (<hr>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addSeparator() {
const element = this.wrap('hr', null);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML line break (<br>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addBreak() {
const element = this.wrap('br', null);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML blockquote to the summary buffer
*
* @param {string} text quote text
* @param {string} cite (optional) citation url
*
* @returns {Summary} summary instance
*/
addQuote(text, cite) {
const attrs = Object.assign({}, (cite && { cite }));
const element = this.wrap('blockquote', text, attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML anchor tag to the summary buffer
*
* @param {string} text link text/content
* @param {string} href hyperlink
*
* @returns {Summary} summary instance
*/
addLink(text, href) {
const element = this.wrap('a', text, { href });
return this.addRaw(element).addEOL();
}
}
const _summary = new Summary();
/**
* @deprecated use `core.summary`
*/
exports.markdownSummary = _summary;
exports.summary = _summary;
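// Usage sketch (illustrative, not executed): a consumer of @actions/core can
// build and flush a job summary; the buffer is written to the file named by
// GITHUB_STEP_SUMMARY. Heading and cell values below are hypothetical:
//
//   await core.summary
//     .addHeading('Checkout results')
//     .addTable([
//       [{ data: 'Ref', header: true }, { data: 'Commit', header: true }],
//       ['main', 'deadbeef']
//     ])
//     .write();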
//# sourceMappingURL=summary.js.map
/***/ }),
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
/**
*
* @param annotationProperties
* @returns The command properties to send with the actual annotation command
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
*/
function toCommandProperties(annotationProperties) {
if (!Object.keys(annotationProperties).length) {
return {};
}
return {
title: annotationProperties.title,
file: annotationProperties.file,
line: annotationProperties.startLine,
endLine: annotationProperties.endLine,
col: annotationProperties.startColumn,
endColumn: annotationProperties.endColumn
};
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 8974:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
Object.defineProperty(exports, "v1", ({
enumerable: true,
get: function () {
return _v.default;
}
}));
Object.defineProperty(exports, "v3", ({
enumerable: true,
get: function () {
return _v2.default;
}
}));
Object.defineProperty(exports, "v4", ({
enumerable: true,
get: function () {
return _v3.default;
}
}));
Object.defineProperty(exports, "v5", ({
enumerable: true,
get: function () {
return _v4.default;
}
}));
Object.defineProperty(exports, "NIL", ({
enumerable: true,
get: function () {
return _nil.default;
}
}));
Object.defineProperty(exports, "version", ({
enumerable: true,
get: function () {
return _version.default;
}
}));
Object.defineProperty(exports, "validate", ({
enumerable: true,
get: function () {
return _validate.default;
}
}));
Object.defineProperty(exports, "stringify", ({
enumerable: true,
get: function () {
return _stringify.default;
}
}));
Object.defineProperty(exports, "parse", ({
enumerable: true,
get: function () {
return _parse.default;
}
}));
var _v = _interopRequireDefault(__nccwpck_require__(1595));
var _v2 = _interopRequireDefault(__nccwpck_require__(6993));
var _v3 = _interopRequireDefault(__nccwpck_require__(1472));
var _v4 = _interopRequireDefault(__nccwpck_require__(6217));
var _nil = _interopRequireDefault(__nccwpck_require__(2381));
var _version = _interopRequireDefault(__nccwpck_require__(427));
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/***/ }),
/***/ 5842:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function md5(bytes) {
if (Array.isArray(bytes)) {
bytes = Buffer.from(bytes);
} else if (typeof bytes === 'string') {
bytes = Buffer.from(bytes, 'utf8');
}
return _crypto.default.createHash('md5').update(bytes).digest();
}
var _default = md5;
exports["default"] = _default;
/***/ }),
/***/ 2381:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _default = '00000000-0000-0000-0000-000000000000';
exports["default"] = _default;
/***/ }),
/***/ 6385:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function parse(uuid) {
if (!(0, _validate.default)(uuid)) {
throw TypeError('Invalid UUID');
}
let v;
const arr = new Uint8Array(16); // Parse ########-....-....-....-............
arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
arr[1] = v >>> 16 & 0xff;
arr[2] = v >>> 8 & 0xff;
arr[3] = v & 0xff; // Parse ........-####-....-....-............
arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
arr[5] = v & 0xff; // Parse ........-....-####-....-............
arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
arr[7] = v & 0xff; // Parse ........-....-....-####-............
arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
arr[9] = v & 0xff; // Parse ........-....-....-....-############
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
arr[11] = v / 0x100000000 & 0xff;
arr[12] = v >>> 24 & 0xff;
arr[13] = v >>> 16 & 0xff;
arr[14] = v >>> 8 & 0xff;
arr[15] = v & 0xff;
return arr;
}
var _default = parse;
exports["default"] = _default;
/***/ }),
/***/ 6230:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports["default"] = _default;
/***/ }),
/***/ 9784:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = rng;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate
let poolPtr = rnds8Pool.length;
function rng() {
if (poolPtr > rnds8Pool.length - 16) {
_crypto.default.randomFillSync(rnds8Pool);
poolPtr = 0;
}
return rnds8Pool.slice(poolPtr, poolPtr += 16);
}
/***/ }),
/***/ 8844:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function sha1(bytes) {
if (Array.isArray(bytes)) {
bytes = Buffer.from(bytes);
} else if (typeof bytes === 'string') {
bytes = Buffer.from(bytes, 'utf8');
}
return _crypto.default.createHash('sha1').update(bytes).digest();
}
var _default = sha1;
exports["default"] = _default;
/***/ }),
/***/ 1458:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Convert array of 16 byte values to UUID string format of the form:
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
*/
const byteToHex = [];
for (let i = 0; i < 256; ++i) {
byteToHex.push((i + 0x100).toString(16).substr(1));
}
function stringify(arr, offset = 0) {
// Note: Be careful editing this code! It's been tuned for performance
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
// of the following:
// - One or more input array values don't map to a hex octet (leading to
// "undefined" in the uuid)
// - Invalid input values for the RFC `version` or `variant` fields
if (!(0, _validate.default)(uuid)) {
throw TypeError('Stringified UUID is invalid');
}
return uuid;
}
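// Illustrative usage sketch (comment only, not part of the bundled module): each of
// the 16 bytes is mapped through the byteToHex table and joined with dashes.
//
//   stringify(new Uint8Array([
//     0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1,
//     0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8
//   ]));
//   // => '6ba7b810-9dad-11d1-80b4-00c04fd430c8'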
var _default = stringify;
exports["default"] = _default;
/***/ }),
/***/ 1595:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(9784));
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
let _nodeId;
let _clockseq; // Previous uuid creation time
let _lastMSecs = 0;
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
function v1(options, buf, offset) {
let i = buf && offset || 0;
const b = buf || new Array(16);
options = options || {};
let node = options.node || _nodeId;
let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if (node == null || clockseq == null) {
const seedBytes = options.random || (options.rng || _rng.default)();
if (node == null) {
// Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
}
if (clockseq == null) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
}
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JS numbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuids generated during the current clock
// cycle to simulate higher resolution clock
let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
if (dt < 0 && options.clockseq === undefined) {
clockseq = clockseq + 1 & 0x3fff;
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
nsecs = 0;
} // Per 4.2.1.2 Throw error if too many uuids are requested
if (nsecs >= 10000) {
throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
}
_lastMSecs = msecs;
_lastNSecs = nsecs;
_clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000; // `time_low`
const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
b[i++] = tl >>> 24 & 0xff;
b[i++] = tl >>> 16 & 0xff;
b[i++] = tl >>> 8 & 0xff;
b[i++] = tl & 0xff; // `time_mid`
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
b[i++] = tmh >>> 8 & 0xff;
b[i++] = tmh & 0xff; // `time_high_and_version`
b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
b[i++] = clockseq & 0xff; // `node`
for (let n = 0; n < 6; ++n) {
b[i + n] = node[n];
}
return buf || (0, _stringify.default)(b);
}
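// Illustrative usage sketch (comment only, not part of the bundled module): v1()
// either returns a string or writes the 16 raw bytes into a caller-supplied buffer.
// The options shown are optional overrides for the fields derived above.
//
//   v1();                                   // => time-based UUID string
//   v1({ msecs: Date.now(), nsecs: 1234 }); // pin the timestamp fields
//   const buf = new Array(16);
//   v1({}, buf, 0);                         // => buf, filled with the raw bytes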
var _default = v1;
exports["default"] = _default;
/***/ }),
/***/ 6993:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(5920));
var _md = _interopRequireDefault(__nccwpck_require__(5842));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports["default"] = _default;
/***/ }),
/***/ 5920:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = _default;
exports.URL = exports.DNS = void 0;
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function stringToBytes(str) {
str = unescape(encodeURIComponent(str)); // UTF8 escape
const bytes = [];
for (let i = 0; i < str.length; ++i) {
bytes.push(str.charCodeAt(i));
}
return bytes;
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
exports.DNS = DNS;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
exports.URL = URL;
function _default(name, version, hashfunc) {
function generateUUID(value, namespace, buf, offset) {
if (typeof value === 'string') {
value = stringToBytes(value);

}
if (typeof namespace === 'string') {
namespace = (0, _parse.default)(namespace);
}
if (namespace.length !== 16) {
throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
} // Compute hash of namespace and value, Per 4.3
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
// hashfunc([...namespace, ... value])`
let bytes = new Uint8Array(16 + value.length);
bytes.set(namespace);
bytes.set(value, namespace.length);
bytes = hashfunc(bytes);
bytes[6] = bytes[6] & 0x0f | version;
bytes[8] = bytes[8] & 0x3f | 0x80;
if (buf) {
offset = offset || 0;
for (let i = 0; i < 16; ++i) {
buf[offset + i] = bytes[i];
}
return buf;
}
return (0, _stringify.default)(bytes);
} // Function#name is not settable on some platforms (#270)
try {
generateUUID.name = name; // eslint-disable-next-line no-empty
} catch (err) {} // For CommonJS default export support
generateUUID.DNS = DNS;
generateUUID.URL = URL;
return generateUUID;
}
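// Illustrative usage sketch (comment only, not part of the bundled module): this
// factory builds the v3 (MD5) and v5 (SHA-1) generators in modules 6993 and 6217.
// A name-based UUID hashes namespace bytes + name bytes, then stamps the version
// and variant bits, so the same inputs always yield the same UUID.
//
//   const v5 = _default('v5', 0x50, sha1);  // `sha1` assumed to be the hash from module 8844
//   v5('example.com', v5.DNS);              // deterministic name-based UUID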
/***/ }),
/***/ 1472:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(9784));
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function v4(options, buf, offset) {
options = options || {};
const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds[6] = rnds[6] & 0x0f | 0x40;
rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
if (buf) {
offset = offset || 0;
for (let i = 0; i < 16; ++i) {
buf[offset + i] = rnds[i];
}
return buf;
}
return (0, _stringify.default)(rnds);
}
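// Illustrative usage sketch (comment only, not part of the bundled module): v4()
// takes 16 random bytes, forces the version nibble to 4 and the variant to 10xx.
//
//   v4();                           // => random UUID string
//   const out = new Uint8Array(16);
//   v4({}, out);                    // => out, populated with the raw 16 bytes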
var _default = v4;
exports["default"] = _default;
/***/ }),
/***/ 6217:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(5920));
var _sha = _interopRequireDefault(__nccwpck_require__(8844));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const v5 = (0, _v.default)('v5', 0x50, _sha.default);
var _default = v5;
exports["default"] = _default;
/***/ }),
/***/ 2609:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _regex = _interopRequireDefault(__nccwpck_require__(6230));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function validate(uuid) {
return typeof uuid === 'string' && _regex.default.test(uuid);
}
var _default = validate;
exports["default"] = _default;
/***/ }),
/***/ 427:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({
value: true
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function version(uuid) {
if (!(0, _validate.default)(uuid)) {
throw TypeError('Invalid UUID');
}
return parseInt(uuid.substr(14, 1), 16);
}
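// Illustrative usage sketch (comment only, not part of the bundled module): the
// version digit sits at string index 14 of a canonically formatted UUID.
//
//   version('6ba7b810-9dad-11d1-80b4-00c04fd430c8'); // => 1
//   version('not-a-uuid');                           // throws TypeError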
var _default = version;
exports["default"] = _default;
/***/ }),
/***/ 1514:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const tr = __nccwpck_require__(8159);
/**
* Exec a command.
* Output will be streamed to the live console.
* Returns promise with return code
*
* @param commandLine command to execute (can include additional args). Must be correctly escaped.
* @param args optional arguments for tool. Escaping is handled by the lib.
* @param options optional exec options. See ExecOptions
* @returns Promise<number> exit code
*/
function exec(commandLine, args, options) {
return __awaiter(this, void 0, void 0, function* () {
const commandArgs = tr.argStringToArray(commandLine);
if (commandArgs.length === 0) {
throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
}
// Path to tool to execute should be first arg
const toolPath = commandArgs[0];
args = commandArgs.slice(1).concat(args || []);
const runner = new tr.ToolRunner(toolPath, args, options);
return runner.exec();
});
}
exports.exec = exec;
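// Illustrative usage sketch (comment only, not part of the bundled module), assuming
// it runs inside an async GitHub Actions step:
//
//   const exitCode = await exec('git', ['status', '--porcelain'], {
//     cwd: process.cwd(),
//     ignoreReturnCode: true,  // resolve with the code instead of rejecting on non-zero exit
//     listeners: { stdout: data => process.stdout.write(data) }
//   });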
//# sourceMappingURL=exec.js.map
/***/ }),
/***/ 8159:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const os = __nccwpck_require__(2037);
const events = __nccwpck_require__(2361);
const child = __nccwpck_require__(2081);
/* eslint-disable @typescript-eslint/unbound-method */
const IS_WINDOWS = process.platform === 'win32';
/*
* Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
*/
class ToolRunner extends events.EventEmitter {
constructor(toolPath, args, options) {
super();
if (!toolPath) {
throw new Error("Parameter 'toolPath' cannot be null or empty.");
}
this.toolPath = toolPath;
this.args = args || [];
this.options = options || {};
}
_debug(message) {
if (this.options.listeners && this.options.listeners.debug) {
this.options.listeners.debug(message);
}
}
_getCommandString(options, noPrefix) {
const toolPath = this._getSpawnFileName();
const args = this._getSpawnArgs(options);
let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
if (IS_WINDOWS) {
// Windows + cmd file
if (this._isCmdFile()) {
cmd += toolPath;
for (const a of args) {
cmd += ` ${a}`;
}
}
// Windows + verbatim
else if (options.windowsVerbatimArguments) {
cmd += `"${toolPath}"`;
for (const a of args) {
cmd += ` ${a}`;
}
}
// Windows (regular)
else {
cmd += this._windowsQuoteCmdArg(toolPath);
for (const a of args) {
cmd += ` ${this._windowsQuoteCmdArg(a)}`;
}
}
}
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different from Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath;
for (const a of args) {
cmd += ` ${a}`;
}
}
return cmd;
}
_processLineBuffer(data, strBuffer, onLine) {
try {
let s = strBuffer + data.toString();
let n = s.indexOf(os.EOL);
while (n > -1) {
const line = s.substring(0, n);
onLine(line);
// the rest of the string ...
s = s.substring(n + os.EOL.length);
n = s.indexOf(os.EOL);
}
strBuffer = s;
}
catch (err) {
// streaming lines to console is best effort. Don't fail a build.
this._debug(`error processing line. Failed with error ${err}`);
}
}
_getSpawnFileName() {
if (IS_WINDOWS) {
if (this._isCmdFile()) {
return process.env['COMSPEC'] || 'cmd.exe';
}
}
return this.toolPath;
}
_getSpawnArgs(options) {
if (IS_WINDOWS) {
if (this._isCmdFile()) {
let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
for (const a of this.args) {
argline += ' ';
argline += options.windowsVerbatimArguments
? a
: this._windowsQuoteCmdArg(a);
}
argline += '"';
return [argline];
}
}
return this.args;
}
_endsWith(str, end) {
return str.endsWith(end);
}
_isCmdFile() {
const upperToolPath = this.toolPath.toUpperCase();
return (this._endsWith(upperToolPath, '.CMD') ||
this._endsWith(upperToolPath, '.BAT'));
}
_windowsQuoteCmdArg(arg) {
// for .exe, apply the normal quoting rules that libuv applies
if (!this._isCmdFile()) {
return this._uvQuoteCmdArg(arg);
}
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if (!arg) {
return '""';
}
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ',
'\t',
'&',
'(',
')',
'[',
']',
'{',
'}',
'^',
'=',
';',
'!',
"'",
'+',
',',
'`',
'~',
'|',
'<',
'>',
'"'
];
let needsQuotes = false;
for (const char of arg) {
if (cmdSpecialChars.some(x => x === char)) {
needsQuotes = true;
break;
}
}
// short-circuit if quotes not needed
if (!needsQuotes) {
return arg;
}
// the following quoting rules are very similar to the rules that libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
let reverse = '"';
let quoteHit = true;
for (let i = arg.length; i > 0; i--) {
// walk the string in reverse
reverse += arg[i - 1];
if (quoteHit && arg[i - 1] === '\\') {
reverse += '\\'; // double the slash
}
else if (arg[i - 1] === '"') {
quoteHit = true;
reverse += '"'; // double the quote
}
else {
quoteHit = false;
}
}
reverse += '"';
return reverse
.split('')
.reverse()
.join('');
}
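// Illustrative sketch (comment only, not part of the bundled module): expected
// transformations under the cmd.exe quoting rules implemented above.
//
//   _windowsQuoteCmdArg('hello')         // => hello          (no special characters)
//   _windowsQuoteCmdArg('hello world')   // => "hello world"  (wrapped in quotes)
//   _windowsQuoteCmdArg('he"llo')        // => "he""llo"      (embedded quotes doubled)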
_uvQuoteCmdArg(arg) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if (!arg) {
// Need double quotation for empty argument
return '""';
}
if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
// No quotation needed
return arg;
}
if (!arg.includes('"') && !arg.includes('\\')) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return `"${arg}"`;
}
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"';
let quoteHit = true;
for (let i = arg.length; i > 0; i--) {
// walk the string in reverse
reverse += arg[i - 1];
if (quoteHit && arg[i - 1] === '\\') {
reverse += '\\';
}
else if (arg[i - 1] === '"') {
quoteHit = true;
reverse += '\\';
}
else {
quoteHit = false;
}
}
reverse += '"';
return reverse
.split('')
.reverse()
.join('');
}
_cloneExecOptions(options) {
options = options || {};
const result = {
cwd: options.cwd || process.cwd(),
env: options.env || process.env,
silent: options.silent || false,
windowsVerbatimArguments: options.windowsVerbatimArguments || false,
failOnStdErr: options.failOnStdErr || false,
ignoreReturnCode: options.ignoreReturnCode || false,
delay: options.delay || 10000
};
result.outStream = options.outStream || process.stdout;
result.errStream = options.errStream || process.stderr;
return result;
}
_getSpawnOptions(options, toolPath) {
options = options || {};
const result = {};
result.cwd = options.cwd;
result.env = options.env;
result['windowsVerbatimArguments'] =
options.windowsVerbatimArguments || this._isCmdFile();
if (options.windowsVerbatimArguments) {
result.argv0 = `"${toolPath}"`;
}
return result;
}
/**
* Exec a tool.
* Output will be streamed to the live console.
* Returns promise with return code
*
* @returns Promise<number> exit code
*/
exec() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => {
this._debug(`exec tool: ${this.toolPath}`);
this._debug('arguments:');
for (const arg of this.args) {
this._debug(` ${arg}`);
}
const optionsNonNull = this._cloneExecOptions(this.options);
if (!optionsNonNull.silent && optionsNonNull.outStream) {
optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
}
const state = new ExecState(optionsNonNull, this.toolPath);
state.on('debug', (message) => {
this._debug(message);
});
const fileName = this._getSpawnFileName();
const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
const stdbuffer = '';
if (cp.stdout) {
cp.stdout.on('data', (data) => {
if (this.options.listeners && this.options.listeners.stdout) {
this.options.listeners.stdout(data);
}
if (!optionsNonNull.silent && optionsNonNull.outStream) {
optionsNonNull.outStream.write(data);
}
this._processLineBuffer(data, stdbuffer, (line) => {
if (this.options.listeners && this.options.listeners.stdline) {
this.options.listeners.stdline(line);
}
});
});
}
const errbuffer = '';
if (cp.stderr) {
cp.stderr.on('data', (data) => {
state.processStderr = true;
if (this.options.listeners && this.options.listeners.stderr) {
this.options.listeners.stderr(data);
}
if (!optionsNonNull.silent &&
optionsNonNull.errStream &&
optionsNonNull.outStream) {
const s = optionsNonNull.failOnStdErr
? optionsNonNull.errStream
: optionsNonNull.outStream;
s.write(data);
}
this._processLineBuffer(data, errbuffer, (line) => {
if (this.options.listeners && this.options.listeners.errline) {
this.options.listeners.errline(line);
}
});
});
}
cp.on('error', (err) => {
state.processError = err.message;
state.processExited = true;
state.processClosed = true;
state.CheckComplete();
});
cp.on('exit', (code) => {
state.processExitCode = code;
state.processExited = true;
this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
state.CheckComplete();
});
cp.on('close', (code) => {
state.processExitCode = code;
state.processExited = true;
state.processClosed = true;
this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
state.CheckComplete();
});
state.on('done', (error, exitCode) => {
if (stdbuffer.length > 0) {
this.emit('stdline', stdbuffer);
}
if (errbuffer.length > 0) {
this.emit('errline', errbuffer);
}
cp.removeAllListeners();
if (error) {
reject(error);
}
else {
resolve(exitCode);
}
});
});
});
}
}
exports.ToolRunner = ToolRunner;
/**
* Convert an arg string to an array of args. Handles escaping
*
* @param argString string of arguments
* @returns string[] array of arguments
*/
function argStringToArray(argString) {
const args = [];
let inQuotes = false;
let escaped = false;
let arg = '';
function append(c) {
// we only escape double quotes.
if (escaped && c !== '"') {
arg += '\\';
}
arg += c;
escaped = false;
}
for (let i = 0; i < argString.length; i++) {
const c = argString.charAt(i);
if (c === '"') {
if (!escaped) {
inQuotes = !inQuotes;
}
else {
append(c);
}
continue;
}
if (c === '\\' && escaped) {
append(c);
continue;
}
if (c === '\\' && inQuotes) {
escaped = true;
continue;
}
if (c === ' ' && !inQuotes) {
if (arg.length > 0) {
args.push(arg);
arg = '';
}
continue;
}
append(c);
}
if (arg.length > 0) {
args.push(arg.trim());
}
return args;
}
exports.argStringToArray = argStringToArray;
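// Illustrative usage sketch (comment only, not part of the bundled module):
//
//   argStringToArray('git commit -m "initial commit"');
//   // => ['git', 'commit', '-m', 'initial commit']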
class ExecState extends events.EventEmitter {
constructor(options, toolPath) {
super();
this.processClosed = false; // tracks whether the process has exited and stdio is closed
this.processError = '';
this.processExitCode = 0;
this.processExited = false; // tracks whether the process has exited
this.processStderr = false; // tracks whether stderr was written to
this.delay = 10000; // 10 seconds
this.done = false;
this.timeout = null;
if (!toolPath) {
throw new Error('toolPath must not be empty');
}
this.options = options;
this.toolPath = toolPath;
if (options.delay) {
this.delay = options.delay;
}
}
CheckComplete() {
if (this.done) {
return;
}
if (this.processClosed) {
this._setResult();
}
else if (this.processExited) {
this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
}
}
_debug(message) {
this.emit('debug', message);
}
_setResult() {
// determine whether there is an error
let error;
if (this.processExited) {
if (this.processError) {
error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
}
else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
}
else if (this.processStderr && this.options.failOnStdErr) {
error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
}
}
// clear the timeout
if (this.timeout) {
clearTimeout(this.timeout);
this.timeout = null;
}
this.done = true;
this.emit('done', error, this.processExitCode);
}
static HandleTimeout(state) {
if (state.done) {
return;
}
if (!state.processClosed && state.processExited) {
const message = `The STDIO streams did not close within ${state.delay /
1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
state._debug(message);
}
state._setResult();
}
}
//# sourceMappingURL=toolrunner.js.map
/***/ }),
/***/ 4087:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Context = void 0;
const fs_1 = __nccwpck_require__(7147);
const os_1 = __nccwpck_require__(2037);
class Context {
/**
* Hydrate the context from the environment
*/
constructor() {
var _a, _b, _c;
this.payload = {};
if (process.env.GITHUB_EVENT_PATH) {
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
}
else {
const path = process.env.GITHUB_EVENT_PATH;
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
}
}
this.eventName = process.env.GITHUB_EVENT_NAME;
this.sha = process.env.GITHUB_SHA;
this.ref = process.env.GITHUB_REF;
this.workflow = process.env.GITHUB_WORKFLOW;
this.action = process.env.GITHUB_ACTION;
this.actor = process.env.GITHUB_ACTOR;
this.job = process.env.GITHUB_JOB;
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
}
get issue() {
const payload = this.payload;
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
}
get repo() {
if (process.env.GITHUB_REPOSITORY) {
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
return { owner, repo };
}
if (this.payload.repository) {
return {
owner: this.payload.repository.owner.login,
repo: this.payload.repository.name
};
}
throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
}
}
exports.Context = Context;
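// Illustrative usage sketch (comment only, not part of the bundled module), assuming
// the standard GITHUB_* environment variables of a workflow run are present:
//
//   const ctx = new Context();
//   ctx.repo;      // => { owner: 'actions', repo: 'checkout' } when
//                  //    GITHUB_REPOSITORY is 'actions/checkout'
//   ctx.eventName; // => e.g. 'push' or 'pull_request'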
//# sourceMappingURL=context.js.map
/***/ }),
/***/ 5438:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOctokit = exports.context = void 0;
const Context = __importStar(__nccwpck_require__(4087));
const utils_1 = __nccwpck_require__(3030);
exports.context = new Context.Context();
/**
* Returns a hydrated octokit ready to use for GitHub Actions
*
* @param token the repo PAT or GITHUB_TOKEN
* @param options other options to set
*/
function getOctokit(token, options, ...additionalPlugins) {
const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
}
exports.getOctokit = getOctokit;
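// Illustrative usage sketch (comment only, not part of the bundled module), assuming
// a token with read access to the repository and an async context:
//
//   const octokit = getOctokit(process.env.GITHUB_TOKEN);
//   const { data } = await octokit.rest.repos.get(exports.context.repo);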
//# sourceMappingURL=github.js.map
/***/ }),
/***/ 7914:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__nccwpck_require__(6255));
function getAuthString(token, options) {
if (!token && !options.auth) {
throw new Error('Parameter token or opts.auth is required');
}
else if (token && options.auth) {
throw new Error('Parameters token and opts.auth may not both be specified');
}
return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports.getAuthString = getAuthString;
function getProxyAgent(destinationUrl) {
const hc = new httpClient.HttpClient();
return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
function getApiBaseUrl() {
return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
exports.getApiBaseUrl = getApiBaseUrl;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 3030:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;
const Context = __importStar(__nccwpck_require__(4087));
const Utils = __importStar(__nccwpck_require__(7914));
// octokit + plugins
const core_1 = __nccwpck_require__(6762);
const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044);
const plugin_paginate_rest_1 = __nccwpck_require__(4193);
exports.context = new Context.Context();
const baseUrl = Utils.getApiBaseUrl();
exports.defaults = {
baseUrl,
request: {
agent: Utils.getProxyAgent(baseUrl)
}
};
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);
/**
* Convenience function to correctly format Octokit options to pass into the constructor.
*
* @param token the repo PAT or GITHUB_TOKEN
* @param options other options to set
*/
function getOctokitOptions(token, options) {
const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
// Auth
const auth = Utils.getAuthString(token, opts);
if (auth) {
opts.auth = auth;
}
return opts;
}
exports.getOctokitOptions = getOctokitOptions;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 5526:
/***/ (function(__unused_webpack_module, exports) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
class BasicCredentialHandler {
constructor(username, password) {
this.username = username;
this.password = password;
}
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Bearer ${this.token}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
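// Illustrative usage sketch (comment only, not part of the bundled module): when a
// handler is passed to the http client from module 6255, prepareRequest() attaches
// the Authorization header before every request. `token` is assumed to hold a valid
// credential.
//
//   const client = new HttpClient('my-agent', [new BearerCredentialHandler(token)]);
//   // requests made through `client` now carry `Authorization: Bearer <token>`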
//# sourceMappingURL=auth.js.map
/***/ }),
/***/ 6255:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294));
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
function getProxyUrl(serverUrl) {
const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
constructor(message, statusCode) {
super(message);
this.name = 'HttpClientError';
this.statusCode = statusCode;
Object.setPrototypeOf(this, HttpClientError.prototype);
}
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
constructor(message) {
this.message = message;
}
readBody() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
let output = Buffer.alloc(0);
this.message.on('data', (chunk) => {
output = Buffer.concat([output, chunk]);
});
this.message.on('end', () => {
resolve(output.toString());
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
const parsedUrl = new URL(requestUrl);
return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
this.requestOptions = requestOptions;
if (requestOptions) {
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
});
}
get(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('GET', requestUrl, null, additionalHeaders || {});
});
}
del(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
});
}
post(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('POST', requestUrl, data, additionalHeaders || {});
});
}
patch(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
});
}
put(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
});
}
head(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
});
}
sendStream(verb, requestUrl, stream, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request(verb, requestUrl, stream, additionalHeaders);
});
}
/**
* Gets a typed object from an endpoint
* Be aware that a 404 (not found) resolves with a null result. Other errors (4xx, 5xx) reject the promise
*/
getJson(requestUrl, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
const res = yield this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
postJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
putJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
patchJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
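// Illustrative usage sketch (comment only, not part of the bundled module), with
// `client` assumed to be an HttpClient instance in an async context:
//
//   const res = await client.getJson('https://api.github.com/repos/actions/checkout');
//   res.statusCode; // 200 on success; 404 still resolves
//   res.result;     // parsed JSON body, or null when not found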
/**
* Makes a raw http request.
* All other verb-specific methods such as get, post, and patch ultimately call this.
* Prefer get, del, post, and patch.
*/
request(verb, requestUrl, data, headers) {
return __awaiter(this, void 0, void 0, function* () {
if (this._disposed) {
throw new Error('Client has already been disposed.');
}
const parsedUrl = new URL(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
do {
response = yield this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (const handler of this.handlers) {
if (handler.canHandleAuthentication(response)) {
authenticationHandler = handler;
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (response.message.statusCode &&
HttpRedirectCodes.includes(response.message.statusCode) &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
const parsedRedirectUrl = new URL(redirectUrl);
if (parsedUrl.protocol === 'https:' &&
parsedUrl.protocol !== parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning it;
// otherwise the open socket would leak.
yield response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (const header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = yield this.requestRaw(info, data);
redirectsRemaining--;
}
if (!response.message.statusCode ||
!HttpResponseRetryCodes.includes(response.message.statusCode)) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
yield response.readBody();
yield this._performExponentialBackoff(numTries);
}
} while (numTries < maxTries);
return response;
});
}
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => {
function callbackForResult(err, res) {
if (err) {
reject(err);
}
else if (!res) {
// If `err` is not passed, then `res` must be passed.
reject(new Error('Unknown error'));
}
else {
resolve(res);
}
}
this.requestRawWithCallback(info, data, callbackForResult);
});
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
requestRawWithCallback(info, data, onResult) {
if (typeof data === 'string') {
if (!info.options.headers) {
info.options.headers = {};
}
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
function handleResult(err, res) {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
}
const req = info.httpModule.request(info.options, (msg) => {
const res = new HttpClientResponse(msg);
handleResult(undefined, res);
});
let socket;
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.end();
}
handleResult(new Error(`Request timeout: ${info.options.path}`));
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err);
});
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof data !== 'string') {
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
/**
* Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
getAgent(serverUrl) {
const parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
for (const handler of this.handlers) {
handler.prepareRequest(info.options);
}
}
return info;
}
_mergeHeaders(headers) {
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
_getAgent(parsedUrl) {
let agent;
const proxyUrl = pm.getProxyUrl(parsedUrl);
const useProxy = proxyUrl && proxyUrl.hostname;
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
if (proxyUrl && proxyUrl.hostname) {
const agentOptions = {
maxSockets,
keepAlive: this._keepAlive,
proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
})), { host: proxyUrl.hostname, port: proxyUrl.port })
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using a private agent and a tunnel agent isn't set up, use the global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect requests for the entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
_performExponentialBackoff(retryNumber) {
return __awaiter(this, void 0, void 0, function* () {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
});
}
_processResponse(res, options) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
const statusCode = res.message.statusCode || 0;
const response = {
statusCode,
result: null,
headers: {}
};
// not found leads to null obj returned
if (statusCode === HttpCodes.NotFound) {
resolve(response);
}
// get the result from the body
function dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
const a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
let obj;
let contents;
try {
contents = yield res.readBody();
if (contents && contents.length > 0) {
if (options && options.deserializeDates) {
obj = JSON.parse(contents, dateTimeDeserializer);
}
else {
obj = JSON.parse(contents);
}
response.result = obj;
}
response.headers = res.message.headers;
}
catch (err) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if (statusCode > 299) {
let msg;
// if exception/error in body, attempt to get better error
if (obj && obj.message) {
msg = obj.message;
}
else if (contents && contents.length > 0) {
// it may be the case that the exception is in the body message as string
msg = contents;
}
else {
msg = `Failed request: (${statusCode})`;
}
const err = new HttpClientError(msg, statusCode);
err.result = response.result;
reject(err);
}
else {
resolve(response);
}
}));
});
}
}
exports.HttpClient = HttpClient;
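// Illustrative usage sketch (added for documentation; not part of the upstream library and never
// invoked by this bundle). Shows how a caller might obtain a proxy-aware agent from the HttpClient
// above; the user-agent string is arbitrary, and the result depends on the HTTPS_PROXY / NO_PROXY
// environment variables at call time.
function exampleGetAgentUsage() {
    const client = new HttpClient('example-user-agent');
    // Returns a tunnelling proxy agent when a proxy applies to this URL, otherwise a plain
    // https agent (keep-alive and maxSockets follow the client's requestOptions).
    return client.getAgent('https://api.github.com');
}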
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 9835:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkBypass = exports.getProxyUrl = void 0;
function getProxyUrl(reqUrl) {
const usingSsl = reqUrl.protocol === 'https:';
if (checkBypass(reqUrl)) {
return undefined;
}
const proxyVar = (() => {
if (usingSsl) {
return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
}
else {
return process.env['http_proxy'] || process.env['HTTP_PROXY'];
}
})();
if (proxyVar) {
return new URL(proxyVar);
}
else {
return undefined;
}
}
exports.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
}
// Determine the request port
let reqPort;
if (reqUrl.port) {
reqPort = Number(reqUrl.port);
}
else if (reqUrl.protocol === 'http:') {
reqPort = 80;
}
else if (reqUrl.protocol === 'https:') {
reqPort = 443;
}
// Format the request hostname and hostname with port
const upperReqHosts = [reqUrl.hostname.toUpperCase()];
if (typeof reqPort === 'number') {
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
}
// Compare request host against noproxy
for (const upperNoProxyItem of noProxy
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
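// Illustrative sketch (added for documentation; never invoked by this bundle). Worked examples of
// how the exported helpers above interpret the proxy environment variables; the host names and the
// proxy address are hypothetical.
function exampleProxyBypass() {
    // Suppose https_proxy=http://proxy.local:8080 and no_proxy=example.com are set:
    //   getProxyUrl(new URL('https://api.github.com'))  -> URL('http://proxy.local:8080/')
    //   getProxyUrl(new URL('http://localhost:8080'))   -> undefined (loopback hosts always bypass)
    //   checkBypass(new URL('https://example.com'))     -> true  (exact match)
    //   checkBypass(new URL('https://api.example.com')) -> true  (suffix '.example.com' match)
    //   checkBypass(new URL('https://notexample.com'))  -> false
    return [
        getProxyUrl(new URL('https://api.github.com')),
        checkBypass(new URL('https://api.example.com'))
    ];
}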
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map
/***/ }),
/***/ 1962:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var _a;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
const fs = __importStar(__nccwpck_require__(7147));
const path = __importStar(__nccwpck_require__(1017));
_a = fs.promises
// export const {open} = 'fs'
, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
// export const {open} = 'fs'
exports.IS_WINDOWS = process.platform === 'win32';
// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
exports.UV_FS_O_EXLOCK = 0x10000000;
exports.READONLY = fs.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield exports.stat(fsPath);
}
catch (err) {
if (err.code === 'ENOENT') {
return false;
}
throw err;
}
return true;
});
}
exports.exists = exists;
function isDirectory(fsPath, useStat = false) {
return __awaiter(this, void 0, void 0, function* () {
const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
return stats.isDirectory();
});
}
exports.isDirectory = isDirectory;
/**
* On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
* \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
*/
function isRooted(p) {
p = normalizeSeparators(p);
if (!p) {
throw new Error('isRooted() parameter "p" cannot be empty');
}
if (exports.IS_WINDOWS) {
return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
); // e.g. C: or C:\hello
}
return p.startsWith('/');
}
exports.isRooted = isRooted;
/**
* Best effort attempt to determine whether a file exists and is executable.
* @param filePath file path to check
* @param extensions additional file extensions to try
* @return if file exists and is executable, returns the file path. otherwise empty string.
*/
function tryGetExecutablePath(filePath, extensions) {
return __awaiter(this, void 0, void 0, function* () {
let stats = undefined;
try {
// test file exists
stats = yield exports.stat(filePath);
}
catch (err) {
if (err.code !== 'ENOENT') {
// eslint-disable-next-line no-console
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
}
}
if (stats && stats.isFile()) {
if (exports.IS_WINDOWS) {
// on Windows, test for valid extension
const upperExt = path.extname(filePath).toUpperCase();
if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
return filePath;
}
}
else {
if (isUnixExecutable(stats)) {
return filePath;
}
}
}
// try each extension
const originalFilePath = filePath;
for (const extension of extensions) {
filePath = originalFilePath + extension;
stats = undefined;
try {
stats = yield exports.stat(filePath);
}
catch (err) {
if (err.code !== 'ENOENT') {
// eslint-disable-next-line no-console
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
}
}
if (stats && stats.isFile()) {
if (exports.IS_WINDOWS) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path.dirname(filePath);
const upperName = path.basename(filePath).toUpperCase();
for (const actualName of yield exports.readdir(directory)) {
if (upperName === actualName.toUpperCase()) {
filePath = path.join(directory, actualName);
break;
}
}
}
catch (err) {
// eslint-disable-next-line no-console
console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
}
return filePath;
}
else {
if (isUnixExecutable(stats)) {
return filePath;
}
}
}
}
return '';
});
}
exports.tryGetExecutablePath = tryGetExecutablePath;
function normalizeSeparators(p) {
p = p || '';
if (exports.IS_WINDOWS) {
// convert slashes on Windows
p = p.replace(/\//g, '\\');
// remove redundant slashes
return p.replace(/\\\\+/g, '\\');
}
// remove redundant slashes
return p.replace(/\/\/+/g, '/');
}
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable(stats) {
return ((stats.mode & 1) > 0 ||
((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
((stats.mode & 64) > 0 && stats.uid === process.getuid()));
}
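// Worked example of the mode-bit checks above (added for documentation; never invoked by this
// bundle). For a file with mode 0o755 (rwxr-xr-x) every execute bit is set, so isUnixExecutable()
// would return true regardless of which user owns the file.
function exampleExecuteBits() {
    const mode = 0o755; // 493 decimal
    return {
        otherExecute: (mode & 1) > 0,  // true (execute bit for "others")
        groupExecute: (mode & 8) > 0,  // true (execute bit for the group)
        ownerExecute: (mode & 64) > 0  // true (execute bit for the owner)
    };
}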
// Get the path of cmd.exe in windows
function getCmdPath() {
var _a;
return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;
}
exports.getCmdPath = getCmdPath;
//# sourceMappingURL=io-util.js.map
/***/ }),
/***/ 7436:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;
const assert_1 = __nccwpck_require__(9491);
const path = __importStar(__nccwpck_require__(1017));
const ioUtil = __importStar(__nccwpck_require__(1962));
/**
* Copies a file or folder.
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
*
* @param source source path
* @param dest destination path
* @param options optional. See CopyOptions.
*/
function cp(source, dest, options = {}) {
return __awaiter(this, void 0, void 0, function* () {
const { force, recursive, copySourceDirectory } = readCopyOptions(options);
const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
// Dest is an existing file, but not forcing
if (destStat && destStat.isFile() && !force) {
return;
}
// If dest is an existing directory, should copy inside.
const newDest = destStat && destStat.isDirectory() && copySourceDirectory
? path.join(dest, path.basename(source))
: dest;
if (!(yield ioUtil.exists(source))) {
throw new Error(`no such file or directory: ${source}`);
}
const sourceStat = yield ioUtil.stat(source);
if (sourceStat.isDirectory()) {
if (!recursive) {
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
}
else {
yield cpDirRecursive(source, newDest, 0, force);
}
}
else {
if (path.relative(source, newDest) === '') {
// a file cannot be copied to itself
throw new Error(`'${newDest}' and '${source}' are the same file`);
}
yield copyFile(source, newDest, force);
}
});
}
exports.cp = cp;
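// Illustrative usage sketch for cp() above (added for documentation; never invoked by this
// bundle). All paths are hypothetical.
async function exampleCpUsage() {
    // Copy a single file; an existing destination is overwritten because force defaults to true.
    await cp('/tmp/source.txt', '/tmp/dest.txt');
    // Copying a directory requires recursive: true, otherwise cp() throws.
    await cp('/tmp/src-dir', '/tmp/dest-dir', { recursive: true });
    // When the destination directory already exists and copySourceDirectory is false,
    // only the contents of src-dir are copied into dest-dir (not src-dir itself).
    await cp('/tmp/src-dir', '/tmp/dest-dir', { recursive: true, copySourceDirectory: false });
}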
/**
* Moves a path.
*
* @param source source path
* @param dest destination path
* @param options optional. See MoveOptions.
*/
function mv(source, dest, options = {}) {
return __awaiter(this, void 0, void 0, function* () {
if (yield ioUtil.exists(dest)) {
let destExists = true;
if (yield ioUtil.isDirectory(dest)) {
// If dest is directory copy src into dest
dest = path.join(dest, path.basename(source));
destExists = yield ioUtil.exists(dest);
}
if (destExists) {
if (options.force == null || options.force) {
yield rmRF(dest);
}
else {
throw new Error('Destination already exists');
}
}
}
yield mkdirP(path.dirname(dest));
yield ioUtil.rename(source, dest);
});
}
exports.mv = mv;
/**
* Remove a path recursively with force
*
* @param inputPath path to remove
*/
function rmRF(inputPath) {
return __awaiter(this, void 0, void 0, function* () {
if (ioUtil.IS_WINDOWS) {
// Check for invalid characters
// https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
if (/[*"<>|]/.test(inputPath)) {
throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows');
}
}
try {
// note if path does not exist, error is silent
yield ioUtil.rm(inputPath, {
force: true,
maxRetries: 3,
recursive: true,
retryDelay: 300
});
}
catch (err) {
throw new Error(`File was unable to be removed ${err}`);
}
});
}
exports.rmRF = rmRF;
/**
* Make a directory. Creates the full path with folders in between
* Will throw if it fails
*
* @param fsPath path to create
* @returns Promise<void>
*/
function mkdirP(fsPath) {
return __awaiter(this, void 0, void 0, function* () {
assert_1.ok(fsPath, 'a path argument must be provided');
yield ioUtil.mkdir(fsPath, { recursive: true });
});
}
exports.mkdirP = mkdirP;
/**
 * Returns the path of a tool as if the tool had actually been invoked. Resolves via the system PATH.
 * If check is true and the tool does not exist, it will throw.
*
* @param tool name of the tool
* @param check whether to check if tool exists
* @returns Promise<string> path to tool
*/
function which(tool, check) {
return __awaiter(this, void 0, void 0, function* () {
if (!tool) {
throw new Error("parameter 'tool' is required");
}
// recursive when check=true
if (check) {
const result = yield which(tool, false);
if (!result) {
if (ioUtil.IS_WINDOWS) {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
}
else {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
}
}
return result;
}
const matches = yield findInPath(tool);
if (matches && matches.length > 0) {
return matches[0];
}
return '';
});
}
exports.which = which;
/**
* Returns a list of all occurrences of the given tool on the system path.
*
* @returns Promise<string[]> the paths of the tool
*/
function findInPath(tool) {
return __awaiter(this, void 0, void 0, function* () {
if (!tool) {
throw new Error("parameter 'tool' is required");
}
// build the list of extensions to try
const extensions = [];
if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {
for (const extension of process.env['PATHEXT'].split(path.delimiter)) {
if (extension) {
extensions.push(extension);
}
}
}
// if it's rooted, return it if exists. otherwise return empty.
if (ioUtil.isRooted(tool)) {
const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
if (filePath) {
return [filePath];
}
return [];
}
// if any path separators, return empty
if (tool.includes(path.sep)) {
return [];
}
// build the list of directories
//
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
// it feels like we should not do this. Checking the current directory seems like more of a use
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
// across platforms.
const directories = [];
if (process.env.PATH) {
for (const p of process.env.PATH.split(path.delimiter)) {
if (p) {
directories.push(p);
}
}
}
// find all matches
const matches = [];
for (const directory of directories) {
const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);
if (filePath) {
matches.push(filePath);
}
}
return matches;
});
}
exports.findInPath = findInPath;
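// Illustrative usage sketch for which() and findInPath() above (added for documentation; never
// invoked by this bundle). The tool names are hypothetical.
async function exampleWhichUsage() {
    // Resolve the first `git` on PATH; check = true throws a descriptive error when it is missing.
    const gitPath = await which('git', true);
    // List every match on PATH without throwing (empty array when nothing is found).
    const allNodeBinaries = await findInPath('node');
    return { gitPath, allNodeBinaries };
}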
function readCopyOptions(options) {
const force = options.force == null ? true : options.force;
const recursive = Boolean(options.recursive);
const copySourceDirectory = options.copySourceDirectory == null
? true
: Boolean(options.copySourceDirectory);
return { force, recursive, copySourceDirectory };
}
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
return __awaiter(this, void 0, void 0, function* () {
        // Ensure there is not a runaway recursive copy
if (currentDepth >= 255)
return;
currentDepth++;
yield mkdirP(destDir);
const files = yield ioUtil.readdir(sourceDir);
for (const fileName of files) {
const srcFile = `${sourceDir}/${fileName}`;
const destFile = `${destDir}/${fileName}`;
const srcFileStat = yield ioUtil.lstat(srcFile);
if (srcFileStat.isDirectory()) {
// Recurse
yield cpDirRecursive(srcFile, destFile, currentDepth, force);
}
else {
yield copyFile(srcFile, destFile, force);
}
}
// Change the mode for the newly created directory
yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
});
}
// Buffered file copy
function copyFile(srcFile, destFile, force) {
return __awaiter(this, void 0, void 0, function* () {
if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
// unlink/re-link it
try {
yield ioUtil.lstat(destFile);
yield ioUtil.unlink(destFile);
}
catch (e) {
// Try to override file permission
if (e.code === 'EPERM') {
yield ioUtil.chmod(destFile, '0666');
yield ioUtil.unlink(destFile);
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
const symlinkFull = yield ioUtil.readlink(srcFile);
yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
}
else if (!(yield ioUtil.exists(destFile)) || force) {
yield ioUtil.copyFile(srcFile, destFile);
}
});
}
//# sourceMappingURL=io.js.map
/***/ }),
/***/ 7784:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __nccwpck_require__(2186);
const io = __nccwpck_require__(7436);
const fs = __nccwpck_require__(7147);
const os = __nccwpck_require__(2037);
const path = __nccwpck_require__(1017);
const httpm = __nccwpck_require__(5538);
const semver = __nccwpck_require__(562);
const uuidV4 = __nccwpck_require__(824);
const exec_1 = __nccwpck_require__(1514);
const assert_1 = __nccwpck_require__(9491);
class HTTPError extends Error {
constructor(httpStatusCode) {
super(`Unexpected HTTP response: ${httpStatusCode}`);
this.httpStatusCode = httpStatusCode;
Object.setPrototypeOf(this, new.target.prototype);
}
}
exports.HTTPError = HTTPError;
const IS_WINDOWS = process.platform === 'win32';
const userAgent = 'actions/tool-cache';
// On load, grab the temp directory and cache directory from the environment (with fallbacks applied below)
let tempDirectory = process.env['RUNNER_TEMP'] || '';
let cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '';
// If directories not found, place them in common temp locations
if (!tempDirectory || !cacheRoot) {
let baseLocation;
if (IS_WINDOWS) {
// On windows use the USERPROFILE env variable
baseLocation = process.env['USERPROFILE'] || 'C:\\';
}
else {
if (process.platform === 'darwin') {
baseLocation = '/Users';
}
else {
baseLocation = '/home';
}
}
if (!tempDirectory) {
tempDirectory = path.join(baseLocation, 'actions', 'temp');
}
if (!cacheRoot) {
cacheRoot = path.join(baseLocation, 'actions', 'cache');
}
}
/**
 * Download a tool from a URL and stream it into a file
*
* @param url url of tool to download
* @returns path to downloaded tool
*/
function downloadTool(url) {
return __awaiter(this, void 0, void 0, function* () {
// Wrap in a promise so that we can resolve from within stream callbacks
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
try {
const http = new httpm.HttpClient(userAgent, [], {
allowRetries: true,
maxRetries: 3
});
const destPath = path.join(tempDirectory, uuidV4());
yield io.mkdirP(tempDirectory);
core.debug(`Downloading ${url}`);
core.debug(`Downloading ${destPath}`);
if (fs.existsSync(destPath)) {
throw new Error(`Destination file path ${destPath} already exists`);
}
const response = yield http.get(url);
if (response.message.statusCode !== 200) {
const err = new HTTPError(response.message.statusCode);
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
}
const file = fs.createWriteStream(destPath);
file.on('open', () => __awaiter(this, void 0, void 0, function* () {
try {
const stream = response.message.pipe(file);
stream.on('close', () => {
core.debug('download complete');
resolve(destPath);
});
}
catch (err) {
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
reject(err);
}
}));
file.on('error', err => {
file.end();
reject(err);
});
}
catch (err) {
reject(err);
}
}));
});
}
exports.downloadTool = downloadTool;
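// Illustrative usage sketch for downloadTool() above (added for documentation; never invoked by
// this bundle). The URL is hypothetical.
async function exampleDownloadToolUsage() {
    // Downloads to a uuid-named file under the runner temp directory; a non-200 response
    // rejects with an HTTPError carrying the status code.
    const downloadPath = await downloadTool('https://example.com/some-tool-v1.2.3.tar.gz');
    return downloadPath;
}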
/**
* Extract a .7z file
*
* @param file path to the .7z file
* @param dest destination directory. Optional.
 * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
 * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe, which will
 * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
 * bundled with the tool lib. However, it does not support long paths. 7zr.exe is the reduced command line
 * interface; it is smaller than the full command line interface, and it does support long paths. At the
 * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
 * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
 * to 7zr.exe can be passed to this function.
* @returns path to the destination directory
*/
function extract7z(file, dest, _7zPath) {
return __awaiter(this, void 0, void 0, function* () {
assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
assert_1.ok(file, 'parameter "file" is required');
dest = dest || (yield _createExtractFolder(dest));
const originalCwd = process.cwd();
process.chdir(dest);
if (_7zPath) {
try {
const args = [
'x',
'-bb1',
'-bd',
'-sccUTF-8',
file
];
const options = {
silent: true
};
yield exec_1.exec(`"${_7zPath}"`, args, options);
}
finally {
process.chdir(originalCwd);
}
}
else {
const escapedScript = path
.join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
.replace(/'/g, "''")
.replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
const args = [
'-NoLogo',
'-Sta',
'-NoProfile',
'-NonInteractive',
'-ExecutionPolicy',
'Unrestricted',
'-Command',
command
];
const options = {
silent: true
};
try {
const powershellPath = yield io.which('powershell', true);
yield exec_1.exec(`"${powershellPath}"`, args, options);
}
finally {
process.chdir(originalCwd);
}
}
return dest;
});
}
exports.extract7z = extract7z;
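// Illustrative usage sketch for extract7z() above (added for documentation; never invoked by this
// bundle; Windows only). The paths are hypothetical.
async function exampleExtract7zUsage(downloadPath) {
    // Default: extract with the bundled Invoke-7zdec.ps1 helper (7zdec.exe) into a temp folder.
    const dest = await extract7z(downloadPath);
    // Optionally supply a 7zr.exe for archives that contain very long paths.
    const destLongPaths = await extract7z(downloadPath, undefined, 'D:\\tools\\7zr.exe');
    return { dest, destLongPaths };
}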
/**
* Extract a tar
*
* @param file path to the tar
* @param dest destination directory. Optional.
* @param flags flags for the tar. Optional.
* @returns path to the destination directory
*/
function extractTar(file, dest, flags = 'xz') {
return __awaiter(this, void 0, void 0, function* () {
if (!file) {
throw new Error("parameter 'file' is required");
}
dest = dest || (yield _createExtractFolder(dest));
const tarPath = yield io.which('tar', true);
yield exec_1.exec(`"${tarPath}"`, [flags, '-C', dest, '-f', file]);
return dest;
});
}
exports.extractTar = extractTar;
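// Illustrative usage sketch for extractTar() above (added for documentation; never invoked by
// this bundle). The flags string is handed straight to the system tar.
async function exampleExtractTarUsage(downloadPath) {
    // Default flags 'xz' extract a gzip-compressed tarball into a freshly created temp folder.
    const extractedDir = await extractTar(downloadPath);
    // Other compressions can be selected via the flags argument, e.g. 'xj' for a .tar.bz2.
    const extractedBz2Dir = await extractTar(downloadPath, undefined, 'xj');
    return { extractedDir, extractedBz2Dir };
}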
/**
* Extract a zip
*
* @param file path to the zip
* @param dest destination directory. Optional.
* @returns path to the destination directory
*/
function extractZip(file, dest) {
return __awaiter(this, void 0, void 0, function* () {
if (!file) {
throw new Error("parameter 'file' is required");
}
dest = dest || (yield _createExtractFolder(dest));
if (IS_WINDOWS) {
yield extractZipWin(file, dest);
}
else {
yield extractZipNix(file, dest);
}
return dest;
});
}
exports.extractZip = extractZip;
function extractZipWin(file, dest) {
return __awaiter(this, void 0, void 0, function* () {
// build the powershell command
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
// run powershell
const powershellPath = yield io.which('powershell');
const args = [
'-NoLogo',
'-Sta',
'-NoProfile',
'-NonInteractive',
'-ExecutionPolicy',
'Unrestricted',
'-Command',
command
];
yield exec_1.exec(`"${powershellPath}"`, args);
});
}
function extractZipNix(file, dest) {
return __awaiter(this, void 0, void 0, function* () {
const unzipPath = yield io.which('unzip');
yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
});
}
/**
* Caches a directory and installs it into the tool cacheDir
*
* @param sourceDir the directory to cache into tools
* @param tool tool name
* @param version version of the tool. semver format
* @param arch architecture of the tool. Optional. Defaults to machine architecture
*/
function cacheDir(sourceDir, tool, version, arch) {
return __awaiter(this, void 0, void 0, function* () {
version = semver.clean(version) || version;
arch = arch || os.arch();
core.debug(`Caching tool ${tool} ${version} ${arch}`);
core.debug(`source dir: ${sourceDir}`);
if (!fs.statSync(sourceDir).isDirectory()) {
throw new Error('sourceDir is not a directory');
}
// Create the tool dir
const destPath = yield _createToolPath(tool, version, arch);
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for (const itemName of fs.readdirSync(sourceDir)) {
const s = path.join(sourceDir, itemName);
yield io.cp(s, destPath, { recursive: true });
}
// write .complete
_completeToolPath(tool, version, arch);
return destPath;
});
}
exports.cacheDir = cacheDir;
/**
* Caches a downloaded file (GUID) and installs it
* into the tool cache with a given targetName
*
* @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
 * @param targetFile the name of the file in the tools directory
* @param tool tool name
* @param version version of the tool. semver format
* @param arch architecture of the tool. Optional. Defaults to machine architecture
*/
function cacheFile(sourceFile, targetFile, tool, version, arch) {
return __awaiter(this, void 0, void 0, function* () {
version = semver.clean(version) || version;
arch = arch || os.arch();
core.debug(`Caching tool ${tool} ${version} ${arch}`);
core.debug(`source file: ${sourceFile}`);
if (!fs.statSync(sourceFile).isFile()) {
throw new Error('sourceFile is not a file');
}
// create the tool dir
const destFolder = yield _createToolPath(tool, version, arch);
// copy instead of move. move can fail on Windows due to
// anti-virus software having an open handle on a file.
const destPath = path.join(destFolder, targetFile);
core.debug(`destination file ${destPath}`);
yield io.cp(sourceFile, destPath);
// write .complete
_completeToolPath(tool, version, arch);
return destFolder;
});
}
exports.cacheFile = cacheFile;
/**
* Finds the path to a tool version in the local installed tool cache
*
* @param toolName name of the tool
* @param versionSpec version of the tool
* @param arch optional arch. defaults to arch of computer
*/
function find(toolName, versionSpec, arch) {
if (!toolName) {
throw new Error('toolName parameter is required');
}
if (!versionSpec) {
throw new Error('versionSpec parameter is required');
}
arch = arch || os.arch();
// attempt to resolve an explicit version
if (!_isExplicitVersion(versionSpec)) {
const localVersions = findAllVersions(toolName, arch);
const match = _evaluateVersions(localVersions, versionSpec);
versionSpec = match;
}
// check for the explicit version in the cache
let toolPath = '';
if (versionSpec) {
versionSpec = semver.clean(versionSpec) || '';
const cachePath = path.join(cacheRoot, toolName, versionSpec, arch);
core.debug(`checking cache: ${cachePath}`);
if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
toolPath = cachePath;
}
else {
core.debug('not found');
}
}
return toolPath;
}
exports.find = find;
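// Illustrative sketch of the typical tool-cache workflow using the exports above (added for
// documentation; never invoked by this bundle). The tool name, version, and URL are hypothetical.
async function exampleToolCacheWorkflow() {
    // On later runs find() returns the previously cached copy, so the download is skipped.
    let toolDir = find('mytool', '1.x');
    if (!toolDir) {
        const downloadPath = await downloadTool('https://example.com/mytool-v1.2.3.tar.gz');
        const extractedDir = await extractTar(downloadPath);
        toolDir = await cacheDir(extractedDir, 'mytool', '1.2.3');
    }
    return toolDir;
}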
/**
* Finds the paths to all versions of a tool that are installed in the local tool cache
*
* @param toolName name of the tool
* @param arch optional arch. defaults to arch of computer
*/
function findAllVersions(toolName, arch) {
const versions = [];
arch = arch || os.arch();
const toolPath = path.join(cacheRoot, toolName);
if (fs.existsSync(toolPath)) {
const children = fs.readdirSync(toolPath);
for (const child of children) {
if (_isExplicitVersion(child)) {
const fullPath = path.join(toolPath, child, arch || '');
if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
}
}
return versions;
}
exports.findAllVersions = findAllVersions;
function _createExtractFolder(dest) {
return __awaiter(this, void 0, void 0, function* () {
if (!dest) {
// create a temp dir
dest = path.join(tempDirectory, uuidV4());
}
yield io.mkdirP(dest);
return dest;
});
}
function _createToolPath(tool, version, arch) {
return __awaiter(this, void 0, void 0, function* () {
const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
core.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io.rmRF(folderPath);
yield io.rmRF(markerPath);
yield io.mkdirP(folderPath);
return folderPath;
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
const markerPath = `${folderPath}.complete`;
fs.writeFileSync(markerPath, '');
core.debug('finished caching tool');
}
function _isExplicitVersion(versionSpec) {
const c = semver.clean(versionSpec) || '';
core.debug(`isExplicit: ${c}`);
const valid = semver.valid(c) != null;
core.debug(`explicit? ${valid}`);
return valid;
}
function _evaluateVersions(versions, versionSpec) {
let version = '';
core.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
}
}
if (version) {
core.debug(`matched: ${version}`);
}
else {
core.debug('match not found');
}
return version;
}
//# sourceMappingURL=tool-cache.js.map
/***/ }),
/***/ 562:
/***/ ((module, exports) => {
exports = module.exports = SemVer
var debug
/* istanbul ignore next */
if (typeof process === 'object' &&
process.env &&
process.env.NODE_DEBUG &&
/\bsemver\b/i.test(process.env.NODE_DEBUG)) {
debug = function () {
var args = Array.prototype.slice.call(arguments, 0)
args.unshift('SEMVER')
console.log.apply(console, args)
}
} else {
debug = function () {}
}
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'
var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
/* istanbul ignore next */ 9007199254740991
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16
// The actual regexps go on exports.re
var re = exports.re = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
function tok (n) {
t[n] = R++
}
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
// ## Main Version
// Three dot-separated numeric identifiers.
tok('MAINVERSION')
src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
'(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
'(' + src[t.NUMERICIDENTIFIER] + ')'
tok('MAINVERSIONLOOSE')
src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
'(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
'(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
tok('PRERELEASEIDENTIFIER')
src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
'|' + src[t.NONNUMERICIDENTIFIER] + ')'
tok('PRERELEASEIDENTIFIERLOOSE')
src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
'|' + src[t.NONNUMERICIDENTIFIER] + ')'
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
tok('PRERELEASE')
src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
'(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'
tok('PRERELEASELOOSE')
src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
'(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
tok('BUILD')
src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
'(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
tok('FULL')
tok('FULLPLAIN')
src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
src[t.PRERELEASE] + '?' +
src[t.BUILD] + '?'
src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
tok('LOOSEPLAIN')
src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
src[t.PRERELEASELOOSE] + '?' +
src[t.BUILD] + '?'
tok('LOOSE')
src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'
tok('GTLT')
src[t.GTLT] = '((?:<|>)?=?)'
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
tok('XRANGEIDENTIFIERLOOSE')
src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
tok('XRANGEIDENTIFIER')
src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'
tok('XRANGEPLAIN')
src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
'(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
'(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
'(?:' + src[t.PRERELEASE] + ')?' +
src[t.BUILD] + '?' +
')?)?'
tok('XRANGEPLAINLOOSE')
src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
'(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
'(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
'(?:' + src[t.PRERELEASELOOSE] + ')?' +
src[t.BUILD] + '?' +
')?)?'
tok('XRANGE')
src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
tok('XRANGELOOSE')
src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
tok('COERCE')
src[t.COERCE] = '(^|[^\\d])' +
'(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
'(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
'(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
// Tilde ranges.
// Meaning is "reasonably at or greater than"
tok('LONETILDE')
src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
var tildeTrimReplace = '$1~'
tok('TILDE')
src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
tok('TILDELOOSE')
src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'
// Caret ranges.
// Meaning is "at least and backwards compatible with"
tok('LONECARET')
src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
var caretTrimReplace = '$1^'
tok('CARET')
src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
tok('CARETLOOSE')
src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'
// A simple gt/lt/eq thing, or just "" to indicate "any version"
tok('COMPARATORLOOSE')
src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
tok('COMPARATOR')
src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
tok('COMPARATORTRIM')
src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
'\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'
// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
tok('HYPHENRANGE')
src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
'\\s+-\\s+' +
'(' + src[t.XRANGEPLAIN] + ')' +
'\\s*$'
tok('HYPHENRANGELOOSE')
src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
'\\s+-\\s+' +
'(' + src[t.XRANGEPLAINLOOSE] + ')' +
'\\s*$'
// Star ranges basically just allow anything at all.
tok('STAR')
src[t.STAR] = '(<|>)?=?\\s*\\*'
// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])
}
}
exports.parse = parse
function parse (version, options) {
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false
}
}
if (version instanceof SemVer) {
return version
}
if (typeof version !== 'string') {
return null
}
if (version.length > MAX_LENGTH) {
return null
}
var r = options.loose ? re[t.LOOSE] : re[t.FULL]
if (!r.test(version)) {
return null
}
try {
return new SemVer(version, options)
} catch (er) {
return null
}
}
exports.valid = valid
function valid (version, options) {
var v = parse(version, options)
return v ? v.version : null
}
exports.clean = clean
function clean (version, options) {
var s = parse(version.trim().replace(/^[=v]+/, ''), options)
return s ? s.version : null
}
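// Worked examples of the parsing helpers above (added for documentation; never invoked by this
// bundle). The expected results are noted inline.
function exampleSemverParsing() {
  return [
    valid('1.2.3'),         // '1.2.3'
    valid('=v1.2.3'),       // null    (strict parsing rejects the '=v' prefix)
    valid('=v1.2.3', true), // '1.2.3' (loose mode accepts it)
    clean('  =v1.2.3  '),   // '1.2.3' (trims and strips a leading '=' / 'v')
    parse('not-a-version')  // null
  ]
}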
exports.SemVer = SemVer
function SemVer (version, options) {
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false
}
}
if (version instanceof SemVer) {
if (version.loose === options.loose) {
return version
} else {
version = version.version
}
} else if (typeof version !== 'string') {
throw new TypeError('Invalid Version: ' + version)
}
if (version.length > MAX_LENGTH) {
throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
}
if (!(this instanceof SemVer)) {
return new SemVer(version, options)
}
debug('SemVer', version, options)
this.options = options
this.loose = !!options.loose
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
if (!m) {
throw new TypeError('Invalid Version: ' + version)
}
this.raw = version
// these are actually numbers
this.major = +m[1]
this.minor = +m[2]
this.patch = +m[3]
if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
throw new TypeError('Invalid major version')
}
if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
throw new TypeError('Invalid minor version')
}
if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
throw new TypeError('Invalid patch version')
}
// numberify any prerelease numeric ids
if (!m[4]) {
this.prerelease = []
} else {
this.prerelease = m[4].split('.').map(function (id) {
if (/^[0-9]+$/.test(id)) {
var num = +id
if (num >= 0 && num < MAX_SAFE_INTEGER) {
return num
}
}
return id
})
}
this.build = m[5] ? m[5].split('.') : []
this.format()
}
SemVer.prototype.format = function () {
this.version = this.major + '.' + this.minor + '.' + this.patch
if (this.prerelease.length) {
this.version += '-' + this.prerelease.join('.')
}
return this.version
}
SemVer.prototype.toString = function () {
return this.version
}
SemVer.prototype.compare = function (other) {
debug('SemVer.compare', this.version, this.options, other)
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
return this.compareMain(other) || this.comparePre(other)
}
SemVer.prototype.compareMain = function (other) {
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
return compareIdentifiers(this.major, other.major) ||
compareIdentifiers(this.minor, other.minor) ||
compareIdentifiers(this.patch, other.patch)
}
SemVer.prototype.comparePre = function (other) {
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
// NOT having a prerelease is > having one
if (this.prerelease.length && !other.prerelease.length) {
return -1
} else if (!this.prerelease.length && other.prerelease.length) {
return 1
} else if (!this.prerelease.length && !other.prerelease.length) {
return 0
}
var i = 0
do {
var a = this.prerelease[i]
var b = other.prerelease[i]
debug('prerelease compare', i, a, b)
if (a === undefined && b === undefined) {
return 0
} else if (b === undefined) {
return 1
} else if (a === undefined) {
return -1
} else if (a === b) {
continue
} else {
return compareIdentifiers(a, b)
}
} while (++i)
}
SemVer.prototype.compareBuild = function (other) {
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
var i = 0
do {
var a = this.build[i]
var b = other.build[i]
debug('prerelease compare', i, a, b)
if (a === undefined && b === undefined) {
return 0
} else if (b === undefined) {
return 1
} else if (a === undefined) {
return -1
} else if (a === b) {
continue
} else {
return compareIdentifiers(a, b)
}
} while (++i)
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
SemVer.prototype.inc = function (release, identifier) {
switch (release) {
case 'premajor':
this.prerelease.length = 0
this.patch = 0
this.minor = 0
this.major++
this.inc('pre', identifier)
break
case 'preminor':
this.prerelease.length = 0
this.patch = 0
this.minor++
this.inc('pre', identifier)
break
case 'prepatch':
// If this is already a prerelease, it will bump to the next version
// drop any prereleases that might already exist, since they are not
// relevant at this point.
this.prerelease.length = 0
this.inc('patch', identifier)
this.inc('pre', identifier)
break
// If the input is a non-prerelease version, this acts the same as
// prepatch.
case 'prerelease':
if (this.prerelease.length === 0) {
this.inc('patch', identifier)
}
this.inc('pre', identifier)
break
case 'major':
// If this is a pre-major version, bump up to the same major version.
// Otherwise increment major.
// 1.0.0-5 bumps to 1.0.0
// 1.1.0 bumps to 2.0.0
if (this.minor !== 0 ||
this.patch !== 0 ||
this.prerelease.length === 0) {
this.major++
}
this.minor = 0
this.patch = 0
this.prerelease = []
break
case 'minor':
// If this is a pre-minor version, bump up to the same minor version.
// Otherwise increment minor.
// 1.2.0-5 bumps to 1.2.0
// 1.2.1 bumps to 1.3.0
if (this.patch !== 0 || this.prerelease.length === 0) {
this.minor++
}
this.patch = 0
this.prerelease = []
break
case 'patch':
// If this is not a pre-release version, it will increment the patch.
// If it is a pre-release it will bump up to the same patch version.
// 1.2.0-5 patches to 1.2.0
// 1.2.0 patches to 1.2.1
if (this.prerelease.length === 0) {
this.patch++
}
this.prerelease = []
break
// This probably shouldn't be used publicly.
// 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
case 'pre':
if (this.prerelease.length === 0) {
this.prerelease = [0]
} else {
var i = this.prerelease.length
while (--i >= 0) {
if (typeof this.prerelease[i] === 'number') {
this.prerelease[i]++
i = -2
}
}
if (i === -1) {
// didn't increment anything
this.prerelease.push(0)
}
}
if (identifier) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
if (this.prerelease[0] === identifier) {
if (isNaN(this.prerelease[1])) {
this.prerelease = [identifier, 0]
}
} else {
this.prerelease = [identifier, 0]
}
}
break
default:
throw new Error('invalid increment argument: ' + release)
}
this.format()
this.raw = this.version
return this
}
exports.inc = inc
function inc (version, release, loose, identifier) {
if (typeof (loose) === 'string') {
identifier = loose
loose = undefined
}
try {
return new SemVer(version, loose).inc(release, identifier).version
} catch (er) {
return null
}
}
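// Worked examples for inc() above (added for documentation; never invoked by this bundle).
function exampleSemverInc() {
  return [
    inc('1.2.3', 'patch'),              // '1.2.4'
    inc('1.2.3', 'minor'),              // '1.3.0'
    inc('1.2.3', 'prerelease', 'beta'), // '1.2.4-beta.0'
    inc('1.2.4-beta.0', 'prerelease'),  // '1.2.4-beta.1'
    inc('1.2.4-beta.1', 'patch')        // '1.2.4'
  ]
}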
exports.diff = diff
function diff (version1, version2) {
if (eq(version1, version2)) {
return null
} else {
var v1 = parse(version1)
var v2 = parse(version2)
var prefix = ''
if (v1.prerelease.length || v2.prerelease.length) {
prefix = 'pre'
var defaultResult = 'prerelease'
}
for (var key in v1) {
if (key === 'major' || key === 'minor' || key === 'patch') {
if (v1[key] !== v2[key]) {
return prefix + key
}
}
}
return defaultResult // may be undefined
}
}
exports.compareIdentifiers = compareIdentifiers
var numeric = /^[0-9]+$/
function compareIdentifiers (a, b) {
var anum = numeric.test(a)
var bnum = numeric.test(b)
if (anum && bnum) {
a = +a
b = +b
}
return a === b ? 0
: (anum && !bnum) ? -1
: (bnum && !anum) ? 1
: a < b ? -1
: 1
}
exports.rcompareIdentifiers = rcompareIdentifiers
function rcompareIdentifiers (a, b) {
return compareIdentifiers(b, a)
}
exports.major = major
function major (a, loose) {
return new SemVer(a, loose).major
}
exports.minor = minor
function minor (a, loose) {
return new SemVer(a, loose).minor
}
exports.patch = patch
function patch (a, loose) {
return new SemVer(a, loose).patch
}
exports.compare = compare
function compare (a, b, loose) {
return new SemVer(a, loose).compare(new SemVer(b, loose))
}
exports.compareLoose = compareLoose
function compareLoose (a, b) {
return compare(a, b, true)
}
exports.compareBuild = compareBuild
function compareBuild (a, b, loose) {
var versionA = new SemVer(a, loose)
var versionB = new SemVer(b, loose)
return versionA.compare(versionB) || versionA.compareBuild(versionB)
}
exports.rcompare = rcompare
function rcompare (a, b, loose) {
return compare(b, a, loose)
}
exports.sort = sort
function sort (list, loose) {
return list.sort(function (a, b) {
return exports.compareBuild(a, b, loose)
})
}
exports.rsort = rsort
function rsort (list, loose) {
return list.sort(function (a, b) {
return exports.compareBuild(b, a, loose)
})
}
exports.gt = gt
function gt (a, b, loose) {
return compare(a, b, loose) > 0
}
exports.lt = lt
function lt (a, b, loose) {
return compare(a, b, loose) < 0
}
exports.eq = eq
function eq (a, b, loose) {
return compare(a, b, loose) === 0
}
exports.neq = neq
function neq (a, b, loose) {
return compare(a, b, loose) !== 0
}
exports.gte = gte
function gte (a, b, loose) {
return compare(a, b, loose) >= 0
}
exports.lte = lte
function lte (a, b, loose) {
return compare(a, b, loose) <= 0
}
exports.cmp = cmp
function cmp (a, op, b, loose) {
switch (op) {
case '===':
if (typeof a === 'object')
a = a.version
if (typeof b === 'object')
b = b.version
return a === b
case '!==':
if (typeof a === 'object')
a = a.version
if (typeof b === 'object')
b = b.version
return a !== b
case '':
case '=':
case '==':
return eq(a, b, loose)
case '!=':
return neq(a, b, loose)
case '>':
return gt(a, b, loose)
case '>=':
return gte(a, b, loose)
case '<':
return lt(a, b, loose)
case '<=':
return lte(a, b, loose)
default:
throw new TypeError('Invalid operator: ' + op)
}
}
exports.Comparator = Comparator
function Comparator (comp, options) {
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false
}
}
if (comp instanceof Comparator) {
if (comp.loose === !!options.loose) {
return comp
} else {
comp = comp.value
}
}
if (!(this instanceof Comparator)) {
return new Comparator(comp, options)
}
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
this.parse(comp)
if (this.semver === ANY) {
this.value = ''
} else {
this.value = this.operator + this.semver.version
}
debug('comp', this)
}
var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var m = comp.match(r)
if (!m) {
throw new TypeError('Invalid comparator: ' + comp)
}
this.operator = m[1] !== undefined ? m[1] : ''
if (this.operator === '=') {
this.operator = ''
}
// if it literally is just '>' or '' then allow anything.
if (!m[2]) {
this.semver = ANY
} else {
this.semver = new SemVer(m[2], this.options.loose)
}
}
Comparator.prototype.toString = function () {
return this.value
}
Comparator.prototype.test = function (version) {
debug('Comparator.test', version, this.options.loose)
if (this.semver === ANY || version === ANY) {
return true
}
if (typeof version === 'string') {
try {
version = new SemVer(version, this.options)
} catch (er) {
return false
}
}
return cmp(version, this.operator, this.semver, this.options)
}
Comparator.prototype.intersects = function (comp, options) {
if (!(comp instanceof Comparator)) {
throw new TypeError('a Comparator is required')
}
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false
}
}
var rangeTmp
if (this.operator === '') {
if (this.value === '') {
return true
}
rangeTmp = new Range(comp.value, options)
return satisfies(this.value, rangeTmp, options)
} else if (comp.operator === '') {
if (comp.value === '') {
return true
}
rangeTmp = new Range(this.value, options)
return satisfies(comp.semver, rangeTmp, options)
}
var sameDirectionIncreasing =
(this.operator === '>=' || this.operator === '>') &&
(comp.operator === '>=' || comp.operator === '>')
var sameDirectionDecreasing =
(this.operator === '<=' || this.operator === '<') &&
(comp.operator === '<=' || comp.operator === '<')
var sameSemVer = this.semver.version === comp.semver.version
var differentDirectionsInclusive =
(this.operator === '>=' || this.operator === '<=') &&
(comp.operator === '>=' || comp.operator === '<=')
var oppositeDirectionsLessThan =
cmp(this.semver, '<', comp.semver, options) &&
((this.operator === '>=' || this.operator === '>') &&
(comp.operator === '<=' || comp.operator === '<'))
var oppositeDirectionsGreaterThan =
cmp(this.semver, '>', comp.semver, options) &&
((this.operator === '<=' || this.operator === '<') &&
(comp.operator === '>=' || comp.operator === '>'))
return sameDirectionIncreasing || sameDirectionDecreasing ||
(sameSemVer && differentDirectionsInclusive) ||
oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
exports.Range = Range
function Range (range, options) {
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false
}
}
if (range instanceof Range) {
if (range.loose === !!options.loose &&
range.includePrerelease === !!options.includePrerelease) {
return range
} else {
return new Range(range.raw, options)
}
}
if (range instanceof Comparator) {
return new Range(range.value, options)
}
if (!(this instanceof Range)) {
return new Range(range, options)
}
this.options = options
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or ||
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
return c.length
})
if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range)
}
this.format()
}
Range.prototype.format = function () {
this.range = this.set.map(function (comps) {
return comps.join(' ').trim()
}).join('||').trim()
return this.range
}
Range.prototype.toString = function () {
return this.range
}
Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
// normalize spaces
range = range.split(/\s+/).join(' ')
// At this point, the range is completely trimmed and
// ready to be split into comparators.
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
if (this.options.loose) {
// in loose mode, throw out any that are not valid comparators
set = set.filter(function (comp) {
return !!comp.match(compRe)
})
}
set = set.map(function (comp) {
return new Comparator(comp, this.options)
}, this)
return set
}
Range.prototype.intersects = function (range, options) {
if (!(range instanceof Range)) {
throw new TypeError('a Range is required')
}
return this.set.some(function (thisComparators) {
return (
isSatisfiable(thisComparators, options) &&
range.set.some(function (rangeComparators) {
return (
isSatisfiable(rangeComparators, options) &&
thisComparators.every(function (thisComparator) {
return rangeComparators.every(function (rangeComparator) {
return thisComparator.intersects(rangeComparator, options)
})
})
)
})
)
})
}
// take a set of comparators and determine whether there
// exists a version which can satisfy it
function isSatisfiable (comparators, options) {
var result = true
var remainingComparators = comparators.slice()
var testComparator = remainingComparators.pop()
while (result && remainingComparators.length) {
result = remainingComparators.every(function (otherComparator) {
return testComparator.intersects(otherComparator, options)
})
testComparator = remainingComparators.pop()
}
return result
}
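// A minimal, hedged sketch (not part of the upstream semver source): how the
// intersection logic above behaves for two simple ranges. The helper name
// __rangeIntersectionExample is made up and is never invoked by this bundle.
function __rangeIntersectionExample () {
  return [
    intersects('>=1.2.0', '<2.0.0'), // true: e.g. 1.5.0 satisfies both
    intersects('>2.0.0', '<1.0.0') // false: no version can satisfy both
  ]
}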
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
function toComparators (range, options) {
return new Range(range, options).set.map(function (comp) {
return comp.map(function (c) {
return c.value
}).join(' ').trim().split(' ')
})
}
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
debug('comp', comp, options)
comp = replaceCarets(comp, options)
debug('caret', comp)
comp = replaceTildes(comp, options)
debug('tildes', comp)
comp = replaceXRanges(comp, options)
debug('xrange', comp)
comp = replaceStars(comp, options)
debug('stars', comp)
return comp
}
function isX (id) {
return !id || id.toLowerCase() === 'x' || id === '*'
}
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes (comp, options) {
return comp.trim().split(/\s+/).map(function (comp) {
return replaceTilde(comp, options)
}).join(' ')
}
function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
if (isX(M)) {
ret = ''
} else if (isX(m)) {
ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
} else if (isX(p)) {
// ~1.2 == >=1.2.0 <1.3.0
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
} else if (pr) {
debug('replaceTilde pr', pr)
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + (+m + 1) + '.0'
} else {
// ~1.2.3 == >=1.2.3 <1.3.0
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + (+m + 1) + '.0'
}
debug('tilde return', ret)
return ret
})
}
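// A small illustrative sketch (not from the upstream source) of the tilde
// desugaring documented in the comment block above. The helper name
// __tildeRangeExample is made up and is never called by this bundle.
function __tildeRangeExample () {
  return [
    validRange('~1.2.3'), // '>=1.2.3 <1.3.0'
    satisfies('1.2.9', '~1.2.3'), // true
    satisfies('1.3.0', '~1.2.3') // false: a minor bump falls outside the tilde range
  ]
}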
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
return comp.trim().split(/\s+/).map(function (comp) {
return replaceCaret(comp, options)
}).join(' ')
}
function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
if (isX(M)) {
ret = ''
} else if (isX(m)) {
ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
} else if (isX(p)) {
if (M === '0') {
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
} else {
ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
}
} else if (pr) {
debug('replaceCaret pr', pr)
if (M === '0') {
if (m === '0') {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + m + '.' + (+p + 1)
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + (+m + 1) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + (+M + 1) + '.0.0'
}
} else {
debug('no pr')
if (M === '0') {
if (m === '0') {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + m + '.' + (+p + 1)
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + (+m + 1) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + (+M + 1) + '.0.0'
}
}
debug('caret return', ret)
return ret
})
}
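// A small illustrative sketch (not from the upstream source) of the caret
// desugaring documented above, including the special case for 0.x versions.
// The helper name __caretRangeExample is made up and never called here.
function __caretRangeExample () {
  return [
    validRange('^1.2.3'), // '>=1.2.3 <2.0.0'
    validRange('^0.2.3'), // '>=0.2.3 <0.3.0': a leading zero pins the minor version
    satisfies('1.9.0', '^1.2.3'), // true
    satisfies('2.0.0', '^1.2.3') // false
  ]
}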
function replaceXRanges (comp, options) {
debug('replaceXRanges', comp, options)
return comp.split(/\s+/).map(function (comp) {
return replaceXRange(comp, options)
}).join(' ')
}
function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
var xm = xM || isX(m)
var xp = xm || isX(p)
var anyX = xp
if (gtlt === '=' && anyX) {
gtlt = ''
}
// if we're including prereleases in the match, then we need
// to fix this to -0, the lowest possible prerelease value
pr = options.includePrerelease ? '-0' : ''
if (xM) {
if (gtlt === '>' || gtlt === '<') {
// nothing is allowed
ret = '<0.0.0-0'
} else {
// nothing is forbidden
ret = '*'
}
} else if (gtlt && anyX) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if (xm) {
m = 0
}
p = 0
if (gtlt === '>') {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
// >1.2.3 => >= 1.2.4
gtlt = '>='
if (xm) {
M = +M + 1
m = 0
p = 0
} else {
m = +m + 1
p = 0
}
} else if (gtlt === '<=') {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if (xm) {
M = +M + 1
} else {
m = +m + 1
}
}
ret = gtlt + M + '.' + m + '.' + p + pr
} else if (xm) {
ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr
} else if (xp) {
ret = '>=' + M + '.' + m + '.0' + pr +
' <' + M + '.' + (+m + 1) + '.0' + pr
}
debug('xRange return', ret)
return ret
})
}
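// A hedged sketch (not part of the upstream source) of the x-range rewriting
// above, mirroring the inline comments ('>1.2 => >=1.3.0', '<=7.x is actually
// <8.0.0'). The helper name __xRangeExample is made up and never invoked.
function __xRangeExample () {
  return [
    validRange('1.2.x'), // '>=1.2.0 <1.3.0'
    validRange('>1.2'), // '>=1.3.0'
    validRange('<=7.x') // '<8.0.0'
  ]
}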
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '')
}
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
from, fM, fm, fp, fpr, fb,
to, tM, tm, tp, tpr, tb) {
if (isX(fM)) {
from = ''
} else if (isX(fm)) {
from = '>=' + fM + '.0.0'
} else if (isX(fp)) {
from = '>=' + fM + '.' + fm + '.0'
} else {
from = '>=' + from
}
if (isX(tM)) {
to = ''
} else if (isX(tm)) {
to = '<' + (+tM + 1) + '.0.0'
} else if (isX(tp)) {
to = '<' + tM + '.' + (+tm + 1) + '.0'
} else if (tpr) {
to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
} else {
to = '<=' + to
}
return (from + ' ' + to).trim()
}
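// A hedged sketch (not from the upstream source) of the hyphen-range expansion
// documented above. The helper name __hyphenRangeExample is made up and is
// never invoked by this bundle.
function __hyphenRangeExample () {
  return [
    validRange('1.2 - 3.4.5'), // '>=1.2.0 <=3.4.5'
    validRange('1.2 - 3.4'), // '>=1.2.0 <3.5.0': any 3.4.x still matches
    validRange('1.2.3 - 3.4') // '>=1.2.3 <3.5.0'
  ]
}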
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
if (!version) {
return false
}
if (typeof version === 'string') {
try {
version = new SemVer(version, this.options)
} catch (er) {
return false
}
}
for (var i = 0; i < this.set.length; i++) {
if (testSet(this.set[i], version, this.options)) {
return true
}
}
return false
}
function testSet (set, version, options) {
for (var i = 0; i < set.length; i++) {
if (!set[i].test(version)) {
return false
}
}
if (version.prerelease.length && !options.includePrerelease) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for (i = 0; i < set.length; i++) {
debug(set[i].semver)
if (set[i].semver === ANY) {
continue
}
if (set[i].semver.prerelease.length > 0) {
var allowed = set[i].semver
if (allowed.major === version.major &&
allowed.minor === version.minor &&
allowed.patch === version.patch) {
return true
}
}
}
// Version has a -pre, but it's not one of the ones we like.
return false
}
return true
}
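// A hedged sketch (not from the upstream source) of the prerelease rule that
// testSet enforces above: prereleases only match when some comparator opts in
// to the same [major, minor, patch] tuple, unless includePrerelease is set.
// The helper name __prereleaseExample is made up and never invoked here.
function __prereleaseExample () {
  return [
    satisfies('1.2.3-pr.2', '^1.2.3-pr.1'), // true: same 1.2.3 tuple as the comparator
    satisfies('1.2.4-alpha', '^1.2.3-pr.1'), // false: different tuple, prerelease rejected
    satisfies('1.2.4-alpha', '^1.2.3-pr.1', { includePrerelease: true }) // true
  ]
}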
exports.satisfies = satisfies
function satisfies (version, range, options) {
try {
range = new Range(range, options)
} catch (er) {
return false
}
return range.test(version)
}
exports.maxSatisfying = maxSatisfying
function maxSatisfying (versions, range, options) {
var max = null
var maxSV = null
try {
var rangeObj = new Range(range, options)
} catch (er) {
return null
}
versions.forEach(function (v) {
if (rangeObj.test(v)) {
// satisfies(v, range, options)
if (!max || maxSV.compare(v) === -1) {
// compare(max, v, true)
max = v
maxSV = new SemVer(max, options)
}
}
})
return max
}
exports.minSatisfying = minSatisfying
function minSatisfying (versions, range, options) {
var min = null
var minSV = null
try {
var rangeObj = new Range(range, options)
} catch (er) {
return null
}
versions.forEach(function (v) {
if (rangeObj.test(v)) {
// satisfies(v, range, options)
if (!min || minSV.compare(v) === 1) {
// compare(min, v, true)
min = v
minSV = new SemVer(min, options)
}
}
})
return min
}
exports.minVersion = minVersion
function minVersion (range, loose) {
range = new Range(range, loose)
var minver = new SemVer('0.0.0')
if (range.test(minver)) {
return minver
}
minver = new SemVer('0.0.0-0')
if (range.test(minver)) {
return minver
}
minver = null
for (var i = 0; i < range.set.length; ++i) {
var comparators = range.set[i]
comparators.forEach(function (comparator) {
// Clone to avoid manipulating the comparator's semver object.
var compver = new SemVer(comparator.semver.version)
switch (comparator.operator) {
case '>':
if (compver.prerelease.length === 0) {
compver.patch++
} else {
compver.prerelease.push(0)
}
compver.raw = compver.format()
/* fallthrough */
case '':
case '>=':
if (!minver || gt(minver, compver)) {
minver = compver
}
break
case '<':
case '<=':
/* Ignore maximum versions */
break
/* istanbul ignore next */
default:
throw new Error('Unexpected operation: ' + comparator.operator)
}
})
}
if (minver && range.test(minver)) {
return minver
}
return null
}
exports.validRange = validRange
function validRange (range, options) {
try {
// Return '*' instead of '' so that truthiness works.
// This will throw if it's invalid anyway
return new Range(range, options).range || '*'
} catch (er) {
return null
}
}
// Determine if version is less than all the versions possible in the range
exports.ltr = ltr
function ltr (version, range, options) {
return outside(version, range, '<', options)
}
// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr
function gtr (version, range, options) {
return outside(version, range, '>', options)
}
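// A hedged sketch (not part of the upstream source) of gtr/ltr above, which
// report whether a version sits entirely above or below a range. The helper
// name __outsideExample is made up and never invoked by this bundle.
function __outsideExample () {
  return [
    gtr('2.0.0', '^1.2.3'), // true: 2.0.0 is greater than everything in ^1.2.3
    ltr('1.0.0', '^1.2.3'), // true: 1.0.0 is less than everything in ^1.2.3
    gtr('1.5.0', '^1.2.3') // false: 1.5.0 satisfies the range
  ]
}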
exports.outside = outside
function outside (version, range, hilo, options) {
version = new SemVer(version, options)
range = new Range(range, options)
var gtfn, ltefn, ltfn, comp, ecomp
switch (hilo) {
case '>':
gtfn = gt
ltefn = lte
ltfn = lt
comp = '>'
ecomp = '>='
break
case '<':
gtfn = lt
ltefn = gte
ltfn = gt
comp = '<'
ecomp = '<='
break
default:
throw new TypeError('Must provide a hilo val of "<" or ">"')
}
  // If it satisfies the range it is not outside
if (satisfies(version, range, options)) {
return false
}
// From now on, variable terms are as if we're in "gtr" mode.
// but note that everything is flipped for the "ltr" function.
for (var i = 0; i < range.set.length; ++i) {
var comparators = range.set[i]
var high = null
var low = null
comparators.forEach(function (comparator) {
if (comparator.semver === ANY) {
comparator = new Comparator('>=0.0.0')
}
high = high || comparator
low = low || comparator
if (gtfn(comparator.semver, high.semver, options)) {
high = comparator
} else if (ltfn(comparator.semver, low.semver, options)) {
low = comparator
}
})
    // If the edge version comparator has an operator then our version
// isn't outside it
if (high.operator === comp || high.operator === ecomp) {
return false
}
// If the lowest version comparator has an operator and our version
// is less than it then it isn't higher than the range
if ((!low.operator || low.operator === comp) &&
ltefn(version, low.semver)) {
return false
} else if (low.operator === ecomp && ltfn(version, low.semver)) {
return false
}
}
return true
}
exports.prerelease = prerelease
function prerelease (version, options) {
var parsed = parse(version, options)
return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
}
exports.intersects = intersects
function intersects (r1, r2, options) {
r1 = new Range(r1, options)
r2 = new Range(r2, options)
return r1.intersects(r2)
}
exports.coerce = coerce
function coerce (version, options) {
if (version instanceof SemVer) {
return version
}
if (typeof version === 'number') {
version = String(version)
}
if (typeof version !== 'string') {
return null
}
options = options || {}
var match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = re[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
}
if (match === null) {
return null
}
return parse(match[2] +
'.' + (match[3] || '0') +
'.' + (match[4] || '0'), options)
}
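// A hedged sketch (not from the upstream source) of the rtl behaviour described
// in the comments above: by default coerce() keeps the left-most coercible
// triple, while { rtl: true } keeps the right-most one. The helper name
// __coerceExample is made up and never invoked by this bundle.
function __coerceExample () {
  return [
    coerce('1.2.3.4').version, // '1.2.3'
    coerce('1.2.3.4', { rtl: true }).version // '2.3.4'
  ]
}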
/***/ }),
/***/ 334:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
const REGEX_IS_INSTALLATION = /^ghs_/;
const REGEX_IS_USER_TO_SERVER = /^ghu_/;
async function auth(token) {
const isApp = token.split(/\./).length === 3;
const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
return {
type: "token",
token: token,
tokenType
};
}
/**
* Prefix token for usage in the Authorization header
*
* @param token OAuth token or JSON Web Token
*/
function withAuthorizationPrefix(token) {
if (token.split(/\./).length === 3) {
return `bearer ${token}`;
}
return `token ${token}`;
}
async function hook(token, request, route, parameters) {
const endpoint = request.endpoint.merge(route, parameters);
endpoint.headers.authorization = withAuthorizationPrefix(token);
return request(endpoint);
}
const createTokenAuth = function createTokenAuth(token) {
if (!token) {
throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
}
if (typeof token !== "string") {
throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
}
token = token.replace(/^(token|bearer) +/i, "");
return Object.assign(auth.bind(null, token), {
hook: hook.bind(null, token)
});
};
exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
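// A hedged sketch (not part of @octokit/auth-token): how createTokenAuth above
// classifies tokens and how withAuthorizationPrefix picks the header prefix.
// The token value and the helper name __tokenAuthExample are made up, and the
// function is never invoked by this bundle.
async function __tokenAuthExample() {
  const auth = createTokenAuth("ghs_exampletoken");
  const result = await auth();
  // result => { type: "token", token: "ghs_exampletoken", tokenType: "installation" }
  // A three-part JWT would be sent as "bearer <jwt>", everything else as "token <token>".
  return result;
}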
/***/ }),
/***/ 6762:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var universalUserAgent = __nccwpck_require__(5030);
var beforeAfterHook = __nccwpck_require__(3682);
var request = __nccwpck_require__(6234);
var graphql = __nccwpck_require__(8467);
var authToken = __nccwpck_require__(334);
function _objectWithoutPropertiesLoose(source, excluded) {
if (source == null) return {};
var target = {};
var sourceKeys = Object.keys(source);
var key, i;
for (i = 0; i < sourceKeys.length; i++) {
key = sourceKeys[i];
if (excluded.indexOf(key) >= 0) continue;
target[key] = source[key];
}
return target;
}
function _objectWithoutProperties(source, excluded) {
if (source == null) return {};
var target = _objectWithoutPropertiesLoose(source, excluded);
var key, i;
if (Object.getOwnPropertySymbols) {
var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
for (i = 0; i < sourceSymbolKeys.length; i++) {
key = sourceSymbolKeys[i];
if (excluded.indexOf(key) >= 0) continue;
if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
target[key] = source[key];
}
}
return target;
}
const VERSION = "3.6.0";
const _excluded = ["authStrategy"];
class Octokit {
constructor(options = {}) {
const hook = new beforeAfterHook.Collection();
const requestDefaults = {
baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
headers: {},
request: Object.assign({}, options.request, {
// @ts-ignore internal usage only, no need to type
hook: hook.bind(null, "request")
}),
mediaType: {
previews: [],
format: ""
}
}; // prepend default user agent with `options.userAgent` if set
requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
if (options.baseUrl) {
requestDefaults.baseUrl = options.baseUrl;
}
if (options.previews) {
requestDefaults.mediaType.previews = options.previews;
}
if (options.timeZone) {
requestDefaults.headers["time-zone"] = options.timeZone;
}
this.request = request.request.defaults(requestDefaults);
this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
this.log = Object.assign({
debug: () => {},
info: () => {},
warn: console.warn.bind(console),
error: console.error.bind(console)
}, options.log);
this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
// (2) If only `options.auth` is set, use the default token authentication strategy.
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
// TODO: type `options.auth` based on `options.authStrategy`.
if (!options.authStrategy) {
if (!options.auth) {
// (1)
this.auth = async () => ({
type: "unauthenticated"
});
} else {
// (2)
const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
hook.wrap("request", auth.hook);
this.auth = auth;
}
} else {
const {
authStrategy
} = options,
otherOptions = _objectWithoutProperties(options, _excluded);
const auth = authStrategy(Object.assign({
request: this.request,
log: this.log,
// we pass the current octokit instance as well as its constructor options
// to allow for authentication strategies that return a new octokit instance
// that shares the same internal state as the current one. The original
// requirement for this was the "event-octokit" authentication strategy
// of https://github.com/probot/octokit-auth-probot.
octokit: this,
octokitOptions: otherOptions
}, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
hook.wrap("request", auth.hook);
this.auth = auth;
} // apply plugins
// https://stackoverflow.com/a/16345172
const classConstructor = this.constructor;
classConstructor.plugins.forEach(plugin => {
Object.assign(this, plugin(this, options));
});
}
static defaults(defaults) {
const OctokitWithDefaults = class extends this {
constructor(...args) {
const options = args[0] || {};
if (typeof defaults === "function") {
super(defaults(options));
return;
}
super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
userAgent: `${options.userAgent} ${defaults.userAgent}`
} : null));
}
};
return OctokitWithDefaults;
}
/**
* Attach a plugin (or many) to your Octokit instance.
*
* @example
* const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
*/
static plugin(...newPlugins) {
var _a;
const currentPlugins = this.plugins;
const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
return NewOctokit;
}
}
Octokit.VERSION = VERSION;
Octokit.plugins = [];
exports.Octokit = Octokit;
//# sourceMappingURL=index.js.map
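// A hedged sketch (not part of @octokit/core): the three authentication modes
// described in the constructor comments above. The token value and the helper
// name __octokitAuthModesExample are made up; nothing here is invoked and no
// requests are made.
function __octokitAuthModesExample() {
  const anonymous = new Octokit(); // (1) no auth: this.auth() resolves to { type: "unauthenticated" }
  const withToken = new Octokit({ auth: "ghp_exampletoken" }); // (2) token strategy via createTokenAuth
  // (3) would pass { authStrategy, auth } to plug in a custom strategy instead.
  return [anonymous, withToken];
}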
/***/ }),
/***/ 9440:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var isPlainObject = __nccwpck_require__(3287);
var universalUserAgent = __nccwpck_require__(5030);
function lowercaseKeys(object) {
if (!object) {
return {};
}
return Object.keys(object).reduce((newObj, key) => {
newObj[key.toLowerCase()] = object[key];
return newObj;
}, {});
}
function mergeDeep(defaults, options) {
const result = Object.assign({}, defaults);
Object.keys(options).forEach(key => {
if (isPlainObject.isPlainObject(options[key])) {
if (!(key in defaults)) Object.assign(result, {
[key]: options[key]
});else result[key] = mergeDeep(defaults[key], options[key]);
} else {
Object.assign(result, {
[key]: options[key]
});
}
});
return result;
}
function removeUndefinedProperties(obj) {
for (const key in obj) {
if (obj[key] === undefined) {
delete obj[key];
}
}
return obj;
}
function merge(defaults, route, options) {
if (typeof route === "string") {
let [method, url] = route.split(" ");
options = Object.assign(url ? {
method,
url
} : {
url: method
}, options);
} else {
options = Object.assign({}, route);
} // lowercase header names before merging with defaults to avoid duplicates
options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
removeUndefinedProperties(options);
removeUndefinedProperties(options.headers);
const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
if (defaults && defaults.mediaType.previews.length) {
mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
}
mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
return mergedOptions;
}
function addQueryParameters(url, parameters) {
const separator = /\?/.test(url) ? "&" : "?";
const names = Object.keys(parameters);
if (names.length === 0) {
return url;
}
return url + separator + names.map(name => {
if (name === "q") {
return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
}
return `${name}=${encodeURIComponent(parameters[name])}`;
}).join("&");
}
const urlVariableRegex = /\{[^}]+\}/g;
function removeNonChars(variableName) {
return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}
function extractUrlVariableNames(url) {
const matches = url.match(urlVariableRegex);
if (!matches) {
return [];
}
return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
}
function omit(object, keysToOmit) {
return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
obj[key] = object[key];
return obj;
}, {});
}
// Based on https://github.com/bramstein/url-template, licensed under BSD
// TODO: create separate package.
//
// Copyright (c) 2012-2014, Bram Stein
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
// derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* istanbul ignore file */
function encodeReserved(str) {
return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
if (!/%[0-9A-Fa-f]/.test(part)) {
part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
}
return part;
}).join("");
}
function encodeUnreserved(str) {
return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
return "%" + c.charCodeAt(0).toString(16).toUpperCase();
});
}
function encodeValue(operator, value, key) {
value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
if (key) {
return encodeUnreserved(key) + "=" + value;
} else {
return value;
}
}
function isDefined(value) {
return value !== undefined && value !== null;
}
function isKeyOperator(operator) {
return operator === ";" || operator === "&" || operator === "?";
}
function getValues(context, operator, key, modifier) {
var value = context[key],
result = [];
if (isDefined(value) && value !== "") {
if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
value = value.toString();
if (modifier && modifier !== "*") {
value = value.substring(0, parseInt(modifier, 10));
}
result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
} else {
if (modifier === "*") {
if (Array.isArray(value)) {
value.filter(isDefined).forEach(function (value) {
result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
});
} else {
Object.keys(value).forEach(function (k) {
if (isDefined(value[k])) {
result.push(encodeValue(operator, value[k], k));
}
});
}
} else {
const tmp = [];
if (Array.isArray(value)) {
value.filter(isDefined).forEach(function (value) {
tmp.push(encodeValue(operator, value));
});
} else {
Object.keys(value).forEach(function (k) {
if (isDefined(value[k])) {
tmp.push(encodeUnreserved(k));
tmp.push(encodeValue(operator, value[k].toString()));
}
});
}
if (isKeyOperator(operator)) {
result.push(encodeUnreserved(key) + "=" + tmp.join(","));
} else if (tmp.length !== 0) {
result.push(tmp.join(","));
}
}
}
} else {
if (operator === ";") {
if (isDefined(value)) {
result.push(encodeUnreserved(key));
}
} else if (value === "" && (operator === "&" || operator === "?")) {
result.push(encodeUnreserved(key) + "=");
} else if (value === "") {
result.push("");
}
}
return result;
}
function parseUrl(template) {
return {
expand: expand.bind(null, template)
};
}
function expand(template, context) {
var operators = ["+", "#", ".", "/", ";", "?", "&"];
return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
if (expression) {
let operator = "";
const values = [];
if (operators.indexOf(expression.charAt(0)) !== -1) {
operator = expression.charAt(0);
expression = expression.substr(1);
}
expression.split(/,/g).forEach(function (variable) {
var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
});
if (operator && operator !== "+") {
var separator = ",";
if (operator === "?") {
separator = "&";
} else if (operator !== "#") {
separator = operator;
}
return (values.length !== 0 ? operator : "") + values.join(separator);
} else {
return values.join(",");
}
} else {
return encodeReserved(literal);
}
});
}
function parse(options) {
// https://fetch.spec.whatwg.org/#methods
let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
let headers = Object.assign({}, options.headers);
let body;
let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
const urlVariableNames = extractUrlVariableNames(url);
url = parseUrl(url).expand(parameters);
if (!/^http/.test(url)) {
url = options.baseUrl + url;
}
const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
const remainingParameters = omit(parameters, omittedParameters);
const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
if (!isBinaryRequest) {
if (options.mediaType.format) {
// e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
}
if (options.mediaType.previews.length) {
const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
return `application/vnd.github.${preview}-preview${format}`;
}).join(",");
}
} // for GET/HEAD requests, set URL query parameters from remaining parameters
// for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
if (["GET", "HEAD"].includes(method)) {
url = addQueryParameters(url, remainingParameters);
} else {
if ("data" in remainingParameters) {
body = remainingParameters.data;
} else {
if (Object.keys(remainingParameters).length) {
body = remainingParameters;
} else {
headers["content-length"] = 0;
}
}
} // default content-type for JSON if body is set
if (!headers["content-type"] && typeof body !== "undefined") {
headers["content-type"] = "application/json; charset=utf-8";
} // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow setting the `content-length` header, but we can set the body to an empty string
if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
body = "";
} // Only return body/request keys if present
return Object.assign({
method,
url,
headers
}, typeof body !== "undefined" ? {
body
} : null, options.request ? {
request: options.request
} : null);
}
function endpointWithDefaults(defaults, route, options) {
return parse(merge(defaults, route, options));
}
function withDefaults(oldDefaults, newDefaults) {
const DEFAULTS = merge(oldDefaults, newDefaults);
const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
return Object.assign(endpoint, {
DEFAULTS,
defaults: withDefaults.bind(null, DEFAULTS),
merge: merge.bind(null, DEFAULTS),
parse
});
}
const VERSION = "6.0.12";
const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.
const DEFAULTS = {
method: "GET",
baseUrl: "https://api.github.com",
headers: {
accept: "application/vnd.github.v3+json",
"user-agent": userAgent
},
mediaType: {
format: "",
previews: []
}
};
const endpoint = withDefaults(null, DEFAULTS);
exports.endpoint = endpoint;
//# sourceMappingURL=index.js.map
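// A hedged sketch (not part of @octokit/endpoint): how parse() above routes
// leftover parameters, as described in the GET/HEAD comment. Placeholders in
// the route are expanded, and whatever remains becomes either query parameters
// (GET/HEAD) or the JSON body (other methods). The values and the helper name
// __endpointExample are made up; the function is never invoked here.
function __endpointExample() {
  const get = endpoint("GET /repos/{owner}/{repo}/issues", {
    owner: "octocat",
    repo: "hello-world",
    per_page: 2
  });
  // get.url === "https://api.github.com/repos/octocat/hello-world/issues?per_page=2"
  const post = endpoint("POST /repos/{owner}/{repo}/issues", {
    owner: "octocat",
    repo: "hello-world",
    title: "Example"
  });
  // post.body is { title: "Example" } and a JSON content-type header is added
  return [get, post];
}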
/***/ }),
/***/ 8467:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var request = __nccwpck_require__(6234);
var universalUserAgent = __nccwpck_require__(5030);
const VERSION = "4.8.0";
function _buildMessageForResponseErrors(data) {
return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n");
}
class GraphqlResponseError extends Error {
constructor(request, headers, response) {
super(_buildMessageForResponseErrors(response));
this.request = request;
this.headers = headers;
this.response = response;
this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties.
this.errors = response.errors;
this.data = response.data; // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
}
}
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
function graphql(request, query, options) {
if (options) {
if (typeof query === "string" && "query" in options) {
return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
}
for (const key in options) {
if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
}
}
const parsedOptions = typeof query === "string" ? Object.assign({
query
}, options) : query;
const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
if (NON_VARIABLE_OPTIONS.includes(key)) {
result[key] = parsedOptions[key];
return result;
}
if (!result.variables) {
result.variables = {};
}
result.variables[key] = parsedOptions[key];
return result;
}, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
// https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
}
return request(requestOptions).then(response => {
if (response.data.errors) {
const headers = {};
for (const key of Object.keys(response.headers)) {
headers[key] = response.headers[key];
}
throw new GraphqlResponseError(requestOptions, headers, response.data);
}
return response.data.data;
});
}
function withDefaults(request$1, newDefaults) {
const newRequest = request$1.defaults(newDefaults);
const newApi = (query, options) => {
return graphql(newRequest, query, options);
};
return Object.assign(newApi, {
defaults: withDefaults.bind(null, newRequest),
endpoint: request.request.endpoint
});
}
const graphql$1 = withDefaults(request.request, {
headers: {
"user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
},
method: "POST",
url: "/graphql"
});
function withCustomRequest(customRequest) {
return withDefaults(customRequest, {
method: "POST",
url: "/graphql"
});
}
exports.GraphqlResponseError = GraphqlResponseError;
exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
//# sourceMappingURL=index.js.map
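// A hedged sketch (not part of @octokit/graphql): how graphql() above splits a
// call into request options and GraphQL variables. "headers" stays a request
// option because it is listed in NON_VARIABLE_OPTIONS, while "login" becomes a
// variable. The token and the helper name __graphqlExample are made up; the
// function is never invoked, so no request is sent.
function __graphqlExample() {
  return graphql$1(
    `query ($login: String!) {
      user(login: $login) {
        name
      }
    }`,
    {
      login: "octocat",
      headers: { authorization: "token ghp_exampletoken" }
    }
  );
}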
/***/ }),
/***/ 4193:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const VERSION = "2.21.3";
function ownKeys(object, enumerableOnly) {
var keys = Object.keys(object);
if (Object.getOwnPropertySymbols) {
var symbols = Object.getOwnPropertySymbols(object);
enumerableOnly && (symbols = symbols.filter(function (sym) {
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
})), keys.push.apply(keys, symbols);
}
return keys;
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = null != arguments[i] ? arguments[i] : {};
i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
_defineProperty(target, key, source[key]);
}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
return target;
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
/**
* Some list response that can be paginated have a different response structure
*
* They have a `total_count` key in the response (search also has `incomplete_results`,
* /installation/repositories also has `repository_selection`), as well as a key with
 * the list of the items, whose name varies from endpoint to endpoint.
*
* Octokit normalizes these responses so that paginated results are always returned following
* the same structure. One challenge is that if the list response has only one page, no Link
 * header is provided, so this header alone is not sufficient to check whether a response is
* paginated or not.
*
* We check if a "total_count" key is present in the response data, but also make sure that
* a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
* otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
*/
function normalizePaginatedListResponse(response) {
// endpoints can respond with 204 if repository is empty
if (!response.data) {
return _objectSpread2(_objectSpread2({}, response), {}, {
data: []
});
}
const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
// to retrieve the same information.
const incompleteResults = response.data.incomplete_results;
const repositorySelection = response.data.repository_selection;
const totalCount = response.data.total_count;
delete response.data.incomplete_results;
delete response.data.repository_selection;
delete response.data.total_count;
const namespaceKey = Object.keys(response.data)[0];
const data = response.data[namespaceKey];
response.data = data;
if (typeof incompleteResults !== "undefined") {
response.data.incomplete_results = incompleteResults;
}
if (typeof repositorySelection !== "undefined") {
response.data.repository_selection = repositorySelection;
}
response.data.total_count = totalCount;
return response;
}
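// A hedged sketch (not part of @octokit/plugin-paginate-rest): what the
// normalization above does to a search-style payload. The sample response and
// the helper name __normalizeExample are made up; the function is never called.
function __normalizeExample() {
  const response = {
    status: 200,
    headers: {},
    data: {
      total_count: 2,
      incomplete_results: false,
      items: [{ id: 1 }, { id: 2 }]
    }
  };
  const normalized = normalizePaginatedListResponse(response);
  // normalized.data is now the `items` array, with total_count and
  // incomplete_results re-attached onto it so callers keep that information.
  return normalized;
}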
function iterator(octokit, route, parameters) {
const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
const requestMethod = typeof route === "function" ? route : octokit.request;
const method = options.method;
const headers = options.headers;
let url = options.url;
return {
[Symbol.asyncIterator]: () => ({
async next() {
if (!url) return {
done: true
};
try {
const response = await requestMethod({
method,
url,
headers
});
const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
// '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
// sets `url` to undefined if "next" URL is not present or `link` header is not set
url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
return {
value: normalizedResponse
};
} catch (error) {
if (error.status !== 409) throw error;
url = "";
return {
value: {
status: 200,
headers: {},
data: []
}
};
}
}
})
};
}
function paginate(octokit, route, parameters, mapFn) {
if (typeof parameters === "function") {
mapFn = parameters;
parameters = undefined;
}
return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
}
function gather(octokit, results, iterator, mapFn) {
return iterator.next().then(result => {
if (result.done) {
return results;
}
let earlyExit = false;
function done() {
earlyExit = true;
}
results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
if (earlyExit) {
return results;
}
return gather(octokit, results, iterator, mapFn);
});
}
const composePaginateRest = Object.assign(paginate, {
iterator
});
const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning
function isPaginatingEndpoint(arg) {
if (typeof arg === "string") {
return paginatingEndpoints.includes(arg);
} else {
return false;
}
}
/**
* @param octokit Octokit instance
* @param options Options passed to Octokit constructor
*/
function paginateRest(octokit) {
return {
paginate: Object.assign(paginate.bind(null, octokit), {
iterator: iterator.bind(null, octokit)
})
};
}
paginateRest.VERSION = VERSION;
exports.composePaginateRest = composePaginateRest;
exports.isPaginatingEndpoint = isPaginatingEndpoint;
exports.paginateRest = paginateRest;
exports.paginatingEndpoints = paginatingEndpoints;
//# sourceMappingURL=index.js.map
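// A hedged sketch (not part of the upstream plugin): how the paginate API that
// paginateRest() installs is typically consumed, following the rel="next" link
// headers that iterator() parses above. `octokit` stands for a hypothetical
// instance built with this plugin; the helper name __paginateExample is made up
// and the function is never invoked here.
async function __paginateExample(octokit) {
  for await (const response of octokit.paginate.iterator(
    "GET /repos/{owner}/{repo}/issues",
    { owner: "octocat", repo: "hello-world", per_page: 100 }
  )) {
    // each `response` is one normalized page; stop early if desired
    if (response.data.length === 0) break;
  }
}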
/***/ }),
/***/ 3044:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function ownKeys(object, enumerableOnly) {
var keys = Object.keys(object);
if (Object.getOwnPropertySymbols) {
var symbols = Object.getOwnPropertySymbols(object);
if (enumerableOnly) {
symbols = symbols.filter(function (sym) {
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
});
}
keys.push.apply(keys, symbols);
}
return keys;
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i] != null ? arguments[i] : {};
if (i % 2) {
ownKeys(Object(source), true).forEach(function (key) {
_defineProperty(target, key, source[key]);
});
} else if (Object.getOwnPropertyDescriptors) {
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
} else {
ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
}
return target;
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
const Endpoints = {
actions: {
addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"],
deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"],
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"],
disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"],
downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"],
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"],
getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"],
getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"],
getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"],
getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"],
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, {
renamed: ["actions", "getGithubActionsPermissionsRepository"]
}],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"],
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"],
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"],
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"],
listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"],
listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"],
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"],
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"],
removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"],
removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"],
removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"],
setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"],
setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"],
setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"],
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"],
setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"]
},
activity: {
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
getFeeds: ["GET /feeds"],
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
getThread: ["GET /notifications/threads/{thread_id}"],
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
listNotificationsForAuthenticatedUser: ["GET /notifications"],
listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
listPublicEvents: ["GET /events"],
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
listPublicEventsForUser: ["GET /users/{username}/events/public"],
listPublicOrgEvents: ["GET /orgs/{org}/events"],
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
listReposStarredByUser: ["GET /users/{username}/starred"],
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
markNotificationsAsRead: ["PUT /notifications"],
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
},
apps: {
addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, {
renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"]
}],
addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
checkToken: ["POST /applications/{client_id}/token"],
createFromManifest: ["POST /app-manifests/{code}/conversions"],
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
deleteInstallation: ["DELETE /app/installations/{installation_id}"],
deleteToken: ["DELETE /applications/{client_id}/token"],
getAuthenticated: ["GET /app"],
getBySlug: ["GET /apps/{app_slug}"],
getInstallation: ["GET /app/installations/{installation_id}"],
getOrgInstallation: ["GET /orgs/{org}/installation"],
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
getUserInstallation: ["GET /users/{username}/installation"],
getWebhookConfigForApp: ["GET /app/hook/config"],
getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
listInstallations: ["GET /app/installations"],
listInstallationsForAuthenticatedUser: ["GET /user/installations"],
listPlans: ["GET /marketplace_listing/plans"],
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
listReposAccessibleToInstallation: ["GET /installation/repositories"],
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
listWebhookDeliveries: ["GET /app/hook/deliveries"],
redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"],
removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, {
renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"]
}],
removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
resetToken: ["PATCH /applications/{client_id}/token"],
revokeInstallationAccessToken: ["DELETE /installation/token"],
scopeToken: ["POST /applications/{client_id}/token/scoped"],
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"],
updateWebhookConfigForApp: ["PATCH /app/hook/config"]
},
billing: {
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"],
getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"],
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
},
checks: {
create: ["POST /repos/{owner}/{repo}/check-runs"],
createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"],
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"],
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"],
rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"],
setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"],
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
},
codeScanning: {
deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"],
getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
renamedParameters: {
alert_id: "alert_number"
}
}],
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, {
renamed: ["codeScanning", "listAlertInstances"]
}],
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
},
codesOfConduct: {
getAllCodesOfConduct: ["GET /codes_of_conduct"],
getConductCode: ["GET /codes_of_conduct/{key}"]
},
codespaces: {
addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"],
createForAuthenticatedUser: ["POST /user/codespaces"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"],
createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"],
createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"],
deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"],
exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"],
getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"],
getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"],
listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"],
listForAuthenticatedUser: ["GET /user/codespaces"],
listInOrganization: ["GET /orgs/{org}/codespaces", {}, {
renamedParameters: {
org_id: "org"
}
}],
listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"],
listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"],
setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"],
startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"],
updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
},
dependabot: {
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]
},
dependencyGraph: {
createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"],
diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]
},
emojis: {
get: ["GET /emojis"]
},
enterpriseAdmin: {
addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"],
listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"],
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
},
gists: {
checkIsStarred: ["GET /gists/{gist_id}/star"],
create: ["POST /gists"],
createComment: ["POST /gists/{gist_id}/comments"],
delete: ["DELETE /gists/{gist_id}"],
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
fork: ["POST /gists/{gist_id}/forks"],
get: ["GET /gists/{gist_id}"],
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
getRevision: ["GET /gists/{gist_id}/{sha}"],
list: ["GET /gists"],
listComments: ["GET /gists/{gist_id}/comments"],
listCommits: ["GET /gists/{gist_id}/commits"],
listForUser: ["GET /users/{username}/gists"],
listForks: ["GET /gists/{gist_id}/forks"],
listPublic: ["GET /gists/public"],
listStarred: ["GET /gists/starred"],
star: ["PUT /gists/{gist_id}/star"],
unstar: ["DELETE /gists/{gist_id}/star"],
update: ["PATCH /gists/{gist_id}"],
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
},
git: {
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
createRef: ["POST /repos/{owner}/{repo}/git/refs"],
createTag: ["POST /repos/{owner}/{repo}/git/tags"],
createTree: ["POST /repos/{owner}/{repo}/git/trees"],
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
},
gitignore: {
getAllTemplates: ["GET /gitignore/templates"],
getTemplate: ["GET /gitignore/templates/{name}"]
},
interactions: {
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, {
renamed: ["interactions", "getRestrictionsForAuthenticatedUser"]
}],
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"],
removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, {
renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"]
}],
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, {
renamed: ["interactions", "setRestrictionsForAuthenticatedUser"]
}]
},
issues: {
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
create: ["POST /repos/{owner}/{repo}/issues"],
createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
createLabel: ["POST /repos/{owner}/{repo}/labels"],
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
list: ["GET /issues"],
listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"],
listForAuthenticatedUser: ["GET /user/issues"],
listForOrg: ["GET /orgs/{org}/issues"],
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
},
licenses: {
get: ["GET /licenses/{license}"],
getAllCommonlyUsed: ["GET /licenses"],
getForRepo: ["GET /repos/{owner}/{repo}/license"]
},
markdown: {
render: ["POST /markdown"],
renderRaw: ["POST /markdown/raw", {
headers: {
"content-type": "text/plain; charset=utf-8"
}
}]
},
meta: {
get: ["GET /meta"],
getOctocat: ["GET /octocat"],
getZen: ["GET /zen"],
root: ["GET /"]
},
migrations: {
cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"],
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"],
downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"],
getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"],
getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
getImportStatus: ["GET /repos/{owner}/{repo}/import"],
getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
listForAuthenticatedUser: ["GET /user/migrations"],
listForOrg: ["GET /orgs/{org}/migrations"],
listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"],
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, {
renamed: ["migrations", "listReposForAuthenticatedUser"]
}],
mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
startForAuthenticatedUser: ["POST /user/migrations"],
startForOrg: ["POST /orgs/{org}/migrations"],
startImport: ["PUT /repos/{owner}/{repo}/import"],
unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"],
unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"],
updateImport: ["PATCH /repos/{owner}/{repo}/import"]
},
orgs: {
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
createInvitation: ["POST /orgs/{org}/invitations"],
createWebhook: ["POST /orgs/{org}/hooks"],
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
get: ["GET /orgs/{org}"],
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"],
list: ["GET /organizations"],
listAppInstallations: ["GET /orgs/{org}/installations"],
listBlockedUsers: ["GET /orgs/{org}/blocks"],
listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"],
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
listForAuthenticatedUser: ["GET /user/orgs"],
listForUser: ["GET /users/{username}/orgs"],
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
listMembers: ["GET /orgs/{org}/members"],
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
listPendingInvitations: ["GET /orgs/{org}/invitations"],
listPublicMembers: ["GET /orgs/{org}/public_members"],
listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
listWebhooks: ["GET /orgs/{org}/hooks"],
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
removeMember: ["DELETE /orgs/{org}/members/{username}"],
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
update: ["PATCH /orgs/{org}"],
updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
},
packages: {
deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"],
deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"],
deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"],
deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, {
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"]
}],
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, {
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]
}],
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"],
getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"],
getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"],
getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"],
getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"],
getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"],
getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
listPackagesForAuthenticatedUser: ["GET /user/packages"],
listPackagesForOrganization: ["GET /orgs/{org}/packages"],
listPackagesForUser: ["GET /users/{username}/packages"],
restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"],
restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"],
restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"],
restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]
},
projects: {
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
createCard: ["POST /projects/columns/{column_id}/cards"],
createColumn: ["POST /projects/{project_id}/columns"],
createForAuthenticatedUser: ["POST /user/projects"],
createForOrg: ["POST /orgs/{org}/projects"],
createForRepo: ["POST /repos/{owner}/{repo}/projects"],
delete: ["DELETE /projects/{project_id}"],
deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
deleteColumn: ["DELETE /projects/columns/{column_id}"],
get: ["GET /projects/{project_id}"],
getCard: ["GET /projects/columns/cards/{card_id}"],
getColumn: ["GET /projects/columns/{column_id}"],
getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"],
listCards: ["GET /projects/columns/{column_id}/cards"],
listCollaborators: ["GET /projects/{project_id}/collaborators"],
listColumns: ["GET /projects/{project_id}/columns"],
listForOrg: ["GET /orgs/{org}/projects"],
listForRepo: ["GET /repos/{owner}/{repo}/projects"],
listForUser: ["GET /users/{username}/projects"],
moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
moveColumn: ["POST /projects/columns/{column_id}/moves"],
removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"],
update: ["PATCH /projects/{project_id}"],
updateCard: ["PATCH /projects/columns/cards/{card_id}"],
updateColumn: ["PATCH /projects/columns/{column_id}"]
},
pulls: {
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
create: ["POST /repos/{owner}/{repo}/pulls"],
createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
list: ["GET /repos/{owner}/{repo}/pulls"],
listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"],
updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
},
rateLimit: {
get: ["GET /rate_limit"]
},
reactions: {
createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"],
createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"],
deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"],
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"],
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"],
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"],
deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"],
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"],
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"],
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"],
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]
},
repos: {
acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, {
renamed: ["repos", "acceptInvitationForAuthenticatedUser"]
}],
acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"],
addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
mapToData: "apps"
}],
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
mapToData: "contexts"
}],
addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
mapToData: "teams"
}],
addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"],
codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"],
createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
createForAuthenticatedUser: ["POST /user/repos"],
createFork: ["POST /repos/{owner}/{repo}/forks"],
createInOrg: ["POST /orgs/{org}/repos"],
createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"],
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
createRelease: ["POST /repos/{owner}/{repo}/releases"],
createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"],
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, {
renamed: ["repos", "declineInvitationForAuthenticatedUser"]
}],
declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"],
delete: ["DELETE /repos/{owner}/{repo}"],
deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"],
deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"],
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"],
disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"],
disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"],
downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, {
renamed: ["repos", "downloadZipballArchive"]
}],
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"],
enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"],
enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"],
generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"],
get: ["GET /repos/{owner}/{repo}"],
getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"],
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
getPages: ["GET /repos/{owner}/{repo}/pages"],
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
getReadme: ["GET /repos/{owner}/{repo}/readme"],
getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"],
getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"],
listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
listBranches: ["GET /repos/{owner}/{repo}/branches"],
listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"],
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
listCommits: ["GET /repos/{owner}/{repo}/commits"],
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
listForAuthenticatedUser: ["GET /user/repos"],
listForOrg: ["GET /orgs/{org}/repos"],
listForUser: ["GET /users/{username}/repos"],
listForks: ["GET /repos/{owner}/{repo}/forks"],
listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
listPublic: ["GET /repositories"],
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"],
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
listReleases: ["GET /repos/{owner}/{repo}/releases"],
listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
listTags: ["GET /repos/{owner}/{repo}/tags"],
listTeams: ["GET /repos/{owner}/{repo}/teams"],
listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"],
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
merge: ["POST /repos/{owner}/{repo}/merges"],
mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
mapToData: "apps"
}],
removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
mapToData: "contexts"
}],
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
mapToData: "teams"
}],
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
mapToData: "apps"
}],
setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
mapToData: "contexts"
}],
setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
mapToData: "teams"
}],
setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
transfer: ["POST /repos/{owner}/{repo}/transfer"],
update: ["PATCH /repos/{owner}/{repo}"],
updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
renamed: ["repos", "updateStatusCheckProtection"]
}],
updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
baseUrl: "https://uploads.github.com"
}]
},
search: {
code: ["GET /search/code"],
commits: ["GET /search/commits"],
issuesAndPullRequests: ["GET /search/issues"],
labels: ["GET /search/labels"],
repos: ["GET /search/repositories"],
topics: ["GET /search/topics"],
users: ["GET /search/users"]
},
secretScanning: {
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
},
teams: {
addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
create: ["POST /orgs/{org}/teams"],
createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
list: ["GET /orgs/{org}/teams"],
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
listForAuthenticatedUser: ["GET /user/teams"],
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
},
users: {
addEmailForAuthenticated: ["POST /user/emails", {}, {
renamed: ["users", "addEmailForAuthenticatedUser"]
}],
addEmailForAuthenticatedUser: ["POST /user/emails"],
block: ["PUT /user/blocks/{username}"],
checkBlocked: ["GET /user/blocks/{username}"],
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, {
renamed: ["users", "createGpgKeyForAuthenticatedUser"]
}],
createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, {
renamed: ["users", "createPublicSshKeyForAuthenticatedUser"]
}],
createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
deleteEmailForAuthenticated: ["DELETE /user/emails", {}, {
renamed: ["users", "deleteEmailForAuthenticatedUser"]
}],
deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
renamed: ["users", "deleteGpgKeyForAuthenticatedUser"]
}],
deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, {
renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"]
}],
deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
follow: ["PUT /user/following/{username}"],
getAuthenticated: ["GET /user"],
getByUsername: ["GET /users/{username}"],
getContextForUser: ["GET /users/{username}/hovercard"],
getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, {
renamed: ["users", "getGpgKeyForAuthenticatedUser"]
}],
getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, {
renamed: ["users", "getPublicSshKeyForAuthenticatedUser"]
}],
getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
list: ["GET /users"],
listBlockedByAuthenticated: ["GET /user/blocks", {}, {
renamed: ["users", "listBlockedByAuthenticatedUser"]
}],
listBlockedByAuthenticatedUser: ["GET /user/blocks"],
listEmailsForAuthenticated: ["GET /user/emails", {}, {
renamed: ["users", "listEmailsForAuthenticatedUser"]
}],
listEmailsForAuthenticatedUser: ["GET /user/emails"],
listFollowedByAuthenticated: ["GET /user/following", {}, {
renamed: ["users", "listFollowedByAuthenticatedUser"]
}],
listFollowedByAuthenticatedUser: ["GET /user/following"],
listFollowersForAuthenticatedUser: ["GET /user/followers"],
listFollowersForUser: ["GET /users/{username}/followers"],
listFollowingForUser: ["GET /users/{username}/following"],
listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, {
renamed: ["users", "listGpgKeysForAuthenticatedUser"]
}],
listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, {
renamed: ["users", "listPublicEmailsForAuthenticatedUser"]
}],
listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
listPublicKeysForUser: ["GET /users/{username}/keys"],
listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, {
renamed: ["users", "listPublicSshKeysForAuthenticatedUser"]
}],
listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, {
renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"]
}],
setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"],
unblock: ["DELETE /user/blocks/{username}"],
unfollow: ["DELETE /user/following/{username}"],
updateAuthenticated: ["PATCH /user"]
}
};
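// Editor's note (illustrative, not part of the generated bundle): each entry in the
// Endpoints map above is a [route, defaults?, decorations?] triple. The route string
// is split into an HTTP verb and a templated URL, the optional defaults object (custom
// headers, baseUrl, ...) is merged into every request, and the optional decorations
// object (renamed, renamedParameters, deprecated, mapToData) adjusts behavior at call
// time. Shapes taken verbatim from the map above:
//
//   updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"]                       // route only
//   renderRaw: ["POST /markdown/raw", { headers: { "content-type": "text/plain; charset=utf-8" } }]
//   acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {},
//                      { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }]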
const VERSION = "5.16.2";
function endpointsToMethods(octokit, endpointsMap) {
const newMethods = {};
for (const [scope, endpoints] of Object.entries(endpointsMap)) {
for (const [methodName, endpoint] of Object.entries(endpoints)) {
const [route, defaults, decorations] = endpoint;
const [method, url] = route.split(/ /);
const endpointDefaults = Object.assign({
method,
url
}, defaults);
if (!newMethods[scope]) {
newMethods[scope] = {};
}
const scopeMethods = newMethods[scope];
if (decorations) {
scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
continue;
}
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
}
}
return newMethods;
}
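// Editor's note: endpointsToMethods() above turns the Endpoints map into nested request
// methods — one namespace per scope, one method per entry, either a plain
// octokit.request.defaults() binding or a decorated wrapper. A minimal usage sketch,
// assuming an `octokit` instance created elsewhere (comment only, never executed here):
//
//   const { rest } = restEndpointMethods(octokit);
//   // rest.issues.get === octokit.request.defaults({
//   //   method: "GET", url: "/repos/{owner}/{repo}/issues/{issue_number}" })
//   // await rest.issues.get({ owner: "octocat", repo: "hello-world", issue_number: 1 });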
function decorate(octokit, scope, methodName, defaults, decorations) {
const requestWithDefaults = octokit.request.defaults(defaults);
/* istanbul ignore next */
function withDecorations(...args) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args); // `.mapToData` short-circuits below; the remaining decorations (renamed, deprecated, renamedParameters) only adjust options and logging
if (decorations.mapToData) {
options = Object.assign({}, options, {
data: options[decorations.mapToData],
[decorations.mapToData]: undefined
});
return requestWithDefaults(options);
}
if (decorations.renamed) {
const [newScope, newMethodName] = decorations.renamed;
octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
}
if (decorations.deprecated) {
octokit.log.warn(decorations.deprecated);
}
if (decorations.renamedParameters) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
const options = requestWithDefaults.endpoint.merge(...args);
for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
if (name in options) {
octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
if (!(alias in options)) {
options[alias] = options[name];
}
delete options[name];
}
}
return requestWithDefaults(options);
} // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
return requestWithDefaults(...args);
}
return Object.assign(withDecorations, requestWithDefaults);
}
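// Editor's note: decorate() above keeps legacy call shapes working — `renamed` and
// `deprecated` only log warnings, `renamedParameters` copies old parameter names onto
// their new names, and `mapToData` moves a named option into the request body.
// Illustrative sketch of the mapToData path (owner/repo/branch values hypothetical):
//
//   // repos.addStatusCheckContexts is decorated with { mapToData: "contexts" }, so
//   //   rest.repos.addStatusCheckContexts({ owner, repo, branch, contexts: ["ci/build"] })
//   // is sent with data: ["ci/build"] as the request body and no `contexts` option.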
function restEndpointMethods(octokit) {
const api = endpointsToMethods(octokit, Endpoints);
return {
rest: api
};
}
restEndpointMethods.VERSION = VERSION;
function legacyRestEndpointMethods(octokit) {
const api = endpointsToMethods(octokit, Endpoints);
return _objectSpread2(_objectSpread2({}, api), {}, {
rest: api
});
}
legacyRestEndpointMethods.VERSION = VERSION;
exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
exports.restEndpointMethods = restEndpointMethods;
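// Editor's note: typical wiring for this plugin, shown for reference only and assuming
// @octokit/core is available in the consuming project (comment only, never executed here):
//
//   // const { Octokit } = require("@octokit/core");
//   // const MyOctokit = Octokit.plugin(restEndpointMethods);
//   // const octokit = new MyOctokit({ auth: token });
//   // await octokit.rest.repos.get({ owner, repo });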
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 537:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var deprecation = __nccwpck_require__(8932);
var once = _interopDefault(__nccwpck_require__(1223));
const logOnceCode = once(deprecation => console.warn(deprecation));
const logOnceHeaders = once(deprecation => console.warn(deprecation));
/**
* Error with extra properties to help with debugging
*/
class RequestError extends Error {
constructor(message, statusCode, options) {
super(message); // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
this.name = "HttpError";
this.status = statusCode;
let headers;
if ("headers" in options && typeof options.headers !== "undefined") {
headers = options.headers;
}
if ("response" in options) {
this.response = options.response;
headers = options.response.headers;
} // redact request credentials without mutating original request options
const requestCopy = Object.assign({}, options.request);
if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, {
authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
});
}
requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
this.request = requestCopy; // deprecations
Object.defineProperty(this, "code", {
get() {
logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
return statusCode;
}
});
Object.defineProperty(this, "headers", {
get() {
logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
return headers || {};
}
});
}
}
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 6234:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var endpoint = __nccwpck_require__(9440);
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.3";
function getBufferResponse(response) {
return response.arrayBuffer();
}
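// Performs the HTTP request with node-fetch (or a caller-supplied fetch),
// copies response headers into a plain object, logs deprecation headers, and
// converts 304/4xx/5xx responses into RequestError instances.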
function fetchWrapper(requestOptions) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
let headers = {};
let status;
let url;
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
}
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
if (status === 204 || status === 205) {
return;
} // GitHub API returns 200 for HEAD requests
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
if (status === 304) {
throw new requestError.RequestError("Not modified", status, {
response: {
url,
status,
headers,
data: await getResponseData(response)
},
request: requestOptions
});
}
if (status >= 400) {
const data = await getResponseData(response);
const error = new requestError.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
headers,
data
},
request: requestOptions
});
throw error;
}
return getResponseData(response);
}).then(data => {
return {
status,
url,
headers,
data
};
}).catch(error => {
if (error instanceof requestError.RequestError) throw error;
throw new requestError.RequestError(error.message, 500, {
request: requestOptions
});
});
}
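// Chooses a body parser from the content-type header: JSON -> response.json(),
// text or utf-8 charsets -> response.text(), everything else -> ArrayBuffer.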
async function getResponseData(response) {
const contentType = response.headers.get("content-type");
if (/application\/json/.test(contentType)) {
return response.json();
}
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
return response.text();
}
return getBufferResponse(response);
}
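// Builds an error message from an API error payload, appending any entries
// found in `data.errors`.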
function toErrorMessage(data) {
if (typeof data === "string") return data; // istanbul ignore else - just in case
if ("message" in data) {
if (Array.isArray(data.errors)) {
return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
}
return data.message;
} // istanbul ignore next - just in case
return `Unknown error: ${JSON.stringify(data)}`;
}
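// Returns a request function bound to the merged endpoint defaults; when a
// request hook is configured, the fetch is routed through that hook.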
function withDefaults(oldEndpoint, newDefaults) {
const endpoint = oldEndpoint.defaults(newDefaults);
const newApi = function (route, parameters) {
const endpointOptions = endpoint.merge(route, parameters);
if (!endpointOptions.request || !endpointOptions.request.hook) {
return fetchWrapper(endpoint.parse(endpointOptions));
}
const request = (route, parameters) => {
return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
};
Object.assign(request, {
endpoint,
defaults: withDefaults.bind(null, endpoint)
});
return endpointOptions.request.hook(request, endpointOptions);
};
return Object.assign(newApi, {
endpoint,
defaults: withDefaults.bind(null, endpoint)
});
}
const request = withDefaults(endpoint.endpoint, {
headers: {
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
}
});
exports.request = request;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 3682:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var register = __nccwpck_require__(4670);
var addHook = __nccwpck_require__(5549);
var removeHook = __nccwpck_require__(6819);
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function.bind;
var bindable = bind.bind(bind);
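// Attaches `remove` plus per-kind registration methods ("before", "error",
// "after", "wrap") onto the hook function and its `api` object.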
function bindApi(hook, state, name) {
var removeHookRef = bindable(removeHook, null).apply(
null,
name ? [state, name] : [state]
);
hook.api = { remove: removeHookRef };
hook.remove = removeHookRef;
["before", "error", "after", "wrap"].forEach(function (kind) {
var args = name ? [state, kind, name] : [state, kind];
hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);
});
}
function HookSingular() {
var singularHookName = "h";
var singularHookState = {
registry: {},
};
var singularHook = register.bind(null, singularHookState, singularHookName);
bindApi(singularHook, singularHookState, singularHookName);
return singularHook;
}
function HookCollection() {
var state = {
registry: {},
};
var hook = register.bind(null, state);
bindApi(hook, state);
return hook;
}
var collectionHookDeprecationMessageDisplayed = false;
function Hook() {
if (!collectionHookDeprecationMessageDisplayed) {
console.warn(
'[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4'
);
collectionHookDeprecationMessageDisplayed = true;
}
return HookCollection();
}
Hook.Singular = HookSingular.bind();
Hook.Collection = HookCollection.bind();
module.exports = Hook;
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook;
module.exports.Singular = Hook.Singular;
module.exports.Collection = Hook.Collection;
/***/ }),
/***/ 5549:
/***/ ((module) => {
module.exports = addHook;
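// Wraps the original hook according to its kind so it runs before, after, or
// on error of the wrapped method, then stores it in the registry.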
function addHook(state, kind, name, hook) {
var orig = hook;
if (!state.registry[name]) {
state.registry[name] = [];
}
if (kind === "before") {
hook = function (method, options) {
return Promise.resolve()
.then(orig.bind(null, options))
.then(method.bind(null, options));
};
}
if (kind === "after") {
hook = function (method, options) {
var result;
return Promise.resolve()
.then(method.bind(null, options))
.then(function (result_) {
result = result_;
return orig(result, options);
})
.then(function () {
return result;
});
};
}
if (kind === "error") {
hook = function (method, options) {
return Promise.resolve()
.then(method.bind(null, options))
.catch(function (error) {
return orig(error, options);
});
};
}
state.registry[name].push({
hook: hook,
orig: orig,
});
}
/***/ }),
/***/ 4670:
/***/ ((module) => {
module.exports = register;
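// Runs `method(options)` through every hook registered under `name`; when
// `name` is an array, the registrations are nested with the first name outermost.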
function register(state, name, method, options) {
if (typeof method !== "function") {
throw new Error("method for before hook must be a function");
}
if (!options) {
options = {};
}
if (Array.isArray(name)) {
return name.reverse().reduce(function (callback, name) {
return register.bind(null, state, name, callback, options);
}, method)();
}
return Promise.resolve().then(function () {
if (!state.registry[name]) {
return method(options);
}
return state.registry[name].reduce(function (method, registered) {
return registered.hook.bind(null, method, options);
}, method)();
});
}
/***/ }),
/***/ 6819:
/***/ ((module) => {
module.exports = removeHook;
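// Removes a previously registered hook by locating its original function in
// the registry entry for `name`.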
function removeHook(state, name, method) {
if (!state.registry[name]) {
return;
}
var index = state.registry[name]
.map(function (registered) {
return registered.orig;
})
.indexOf(method);
if (index === -1) {
return;
}
state.registry[name].splice(index, 1);
}
/***/ }),
/***/ 8803:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var GetIntrinsic = __nccwpck_require__(4538);
var callBind = __nccwpck_require__(2977);
var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf'));
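// Returns the named intrinsic; prototype methods are pre-bound via call-bind
// so they can be invoked as plain functions.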
module.exports = function callBoundIntrinsic(name, allowMissing) {
var intrinsic = GetIntrinsic(name, !!allowMissing);
if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) {
return callBind(intrinsic);
}
return intrinsic;
};
/***/ }),
/***/ 2977:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var bind = __nccwpck_require__(8334);
var GetIntrinsic = __nccwpck_require__(4538);
var $apply = GetIntrinsic('%Function.prototype.apply%');
var $call = GetIntrinsic('%Function.prototype.call%');
var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply);
var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true);
var $defineProperty = GetIntrinsic('%Object.defineProperty%', true);
var $max = GetIntrinsic('%Math.max%');
if ($defineProperty) {
try {
$defineProperty({}, 'a', { value: 1 });
} catch (e) {
// IE 8 has a broken defineProperty
$defineProperty = null;
}
}
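// Binds `originalFunction` so the receiver is supplied as the first call-time
// argument, and restores an accurate `length` on the bound function when possible.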
module.exports = function callBind(originalFunction) {
var func = $reflectApply(bind, $call, arguments);
if ($gOPD && $defineProperty) {
var desc = $gOPD(func, 'length');
if (desc.configurable) {
// original length, plus the receiver, minus any additional arguments (after the receiver)
$defineProperty(
func,
'length',
{ value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) }
);
}
}
return func;
};
var applyBind = function applyBind() {
return $reflectApply(bind, $apply, arguments);
};
if ($defineProperty) {
$defineProperty(module.exports, 'apply', { value: applyBind });
} else {
module.exports.apply = applyBind;
}
/***/ }),
/***/ 8932:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
class Deprecation extends Error {
constructor(message) {
super(message); // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
this.name = 'Deprecation';
}
}
exports.Deprecation = Deprecation;
/***/ }),
/***/ 9320:
/***/ ((module) => {
"use strict";
/* eslint no-invalid-this: 1 */
var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible ';
var slice = Array.prototype.slice;
var toStr = Object.prototype.toString;
var funcType = '[object Function]';
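// Function.prototype.bind fallback: preserves `new` semantics and computes the
// bound function's expected arity.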
module.exports = function bind(that) {
var target = this;
if (typeof target !== 'function' || toStr.call(target) !== funcType) {
throw new TypeError(ERROR_MESSAGE + target);
}
var args = slice.call(arguments, 1);
var bound;
var binder = function () {
if (this instanceof bound) {
var result = target.apply(
this,
args.concat(slice.call(arguments))
);
if (Object(result) === result) {
return result;
}
return this;
} else {
return target.apply(
that,
args.concat(slice.call(arguments))
);
}
};
var boundLength = Math.max(0, target.length - args.length);
var boundArgs = [];
for (var i = 0; i < boundLength; i++) {
boundArgs.push('$' + i);
}
bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder);
if (target.prototype) {
var Empty = function Empty() {};
Empty.prototype = target.prototype;
bound.prototype = new Empty();
Empty.prototype = null;
}
return bound;
};
/***/ }),
/***/ 8334:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var implementation = __nccwpck_require__(9320);
module.exports = Function.prototype.bind || implementation;
/***/ }),
/***/ 4538:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var undefined;
var $SyntaxError = SyntaxError;
var $Function = Function;
var $TypeError = TypeError;
// eslint-disable-next-line consistent-return
var getEvalledConstructor = function (expressionSyntax) {
try {
return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')();
} catch (e) {}
};
var $gOPD = Object.getOwnPropertyDescriptor;
if ($gOPD) {
try {
$gOPD({}, '');
} catch (e) {
$gOPD = null; // this is IE 8, which has a broken gOPD
}
}
var throwTypeError = function () {
throw new $TypeError();
};
var ThrowTypeError = $gOPD
? (function () {
try {
// eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties
arguments.callee; // IE 8 does not throw here
return throwTypeError;
} catch (calleeThrows) {
try {
// IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '')
return $gOPD(arguments, 'callee').get;
} catch (gOPDthrows) {
return throwTypeError;
}
}
}())
: throwTypeError;
var hasSymbols = __nccwpck_require__(587)();
var getProto = Object.getPrototypeOf || function (x) { return x.__proto__; }; // eslint-disable-line no-proto
var needsEval = {};
var TypedArray = typeof Uint8Array === 'undefined' ? undefined : getProto(Uint8Array);
var INTRINSICS = {
'%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError,
'%Array%': Array,
'%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer,
'%ArrayIteratorPrototype%': hasSymbols ? getProto([][Symbol.iterator]()) : undefined,
'%AsyncFromSyncIteratorPrototype%': undefined,
'%AsyncFunction%': needsEval,
'%AsyncGenerator%': needsEval,
'%AsyncGeneratorFunction%': needsEval,
'%AsyncIteratorPrototype%': needsEval,
'%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics,
'%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt,
'%Boolean%': Boolean,
'%DataView%': typeof DataView === 'undefined' ? undefined : DataView,
'%Date%': Date,
'%decodeURI%': decodeURI,
'%decodeURIComponent%': decodeURIComponent,
'%encodeURI%': encodeURI,
'%encodeURIComponent%': encodeURIComponent,
'%Error%': Error,
'%eval%': eval, // eslint-disable-line no-eval
'%EvalError%': EvalError,
'%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array,
'%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array,
'%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry,
'%Function%': $Function,
'%GeneratorFunction%': needsEval,
'%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array,
'%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array,
'%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array,
'%isFinite%': isFinite,
'%isNaN%': isNaN,
'%IteratorPrototype%': hasSymbols ? getProto(getProto([][Symbol.iterator]())) : undefined,
'%JSON%': typeof JSON === 'object' ? JSON : undefined,
'%Map%': typeof Map === 'undefined' ? undefined : Map,
'%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols ? undefined : getProto(new Map()[Symbol.iterator]()),
'%Math%': Math,
'%Number%': Number,
'%Object%': Object,
'%parseFloat%': parseFloat,
'%parseInt%': parseInt,
'%Promise%': typeof Promise === 'undefined' ? undefined : Promise,
'%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy,
'%RangeError%': RangeError,
'%ReferenceError%': ReferenceError,
'%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect,
'%RegExp%': RegExp,
'%Set%': typeof Set === 'undefined' ? undefined : Set,
'%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols ? undefined : getProto(new Set()[Symbol.iterator]()),
'%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer,
'%String%': String,
'%StringIteratorPrototype%': hasSymbols ? getProto(''[Symbol.iterator]()) : undefined,
'%Symbol%': hasSymbols ? Symbol : undefined,
'%SyntaxError%': $SyntaxError,
'%ThrowTypeError%': ThrowTypeError,
'%TypedArray%': TypedArray,
'%TypeError%': $TypeError,
'%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array,
'%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray,
'%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array,
'%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array,
'%URIError%': URIError,
'%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap,
'%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef,
'%WeakSet%': typeof WeakSet === 'undefined' ? undefined : WeakSet
};
var doEval = function doEval(name) {
var value;
if (name === '%AsyncFunction%') {
value = getEvalledConstructor('async function () {}');
} else if (name === '%GeneratorFunction%') {
value = getEvalledConstructor('function* () {}');
} else if (name === '%AsyncGeneratorFunction%') {
value = getEvalledConstructor('async function* () {}');
} else if (name === '%AsyncGenerator%') {
var fn = doEval('%AsyncGeneratorFunction%');
if (fn) {
value = fn.prototype;
}
} else if (name === '%AsyncIteratorPrototype%') {
var gen = doEval('%AsyncGenerator%');
if (gen) {
value = getProto(gen.prototype);
}
}
INTRINSICS[name] = value;
return value;
};
var LEGACY_ALIASES = {
'%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'],
'%ArrayPrototype%': ['Array', 'prototype'],
'%ArrayProto_entries%': ['Array', 'prototype', 'entries'],
'%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'],
'%ArrayProto_keys%': ['Array', 'prototype', 'keys'],
'%ArrayProto_values%': ['Array', 'prototype', 'values'],
'%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'],
'%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'],
'%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'],
'%BooleanPrototype%': ['Boolean', 'prototype'],
'%DataViewPrototype%': ['DataView', 'prototype'],
'%DatePrototype%': ['Date', 'prototype'],
'%ErrorPrototype%': ['Error', 'prototype'],
'%EvalErrorPrototype%': ['EvalError', 'prototype'],
'%Float32ArrayPrototype%': ['Float32Array', 'prototype'],
'%Float64ArrayPrototype%': ['Float64Array', 'prototype'],
'%FunctionPrototype%': ['Function', 'prototype'],
'%Generator%': ['GeneratorFunction', 'prototype'],
'%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'],
'%Int8ArrayPrototype%': ['Int8Array', 'prototype'],
'%Int16ArrayPrototype%': ['Int16Array', 'prototype'],
'%Int32ArrayPrototype%': ['Int32Array', 'prototype'],
'%JSONParse%': ['JSON', 'parse'],
'%JSONStringify%': ['JSON', 'stringify'],
'%MapPrototype%': ['Map', 'prototype'],
'%NumberPrototype%': ['Number', 'prototype'],
'%ObjectPrototype%': ['Object', 'prototype'],
'%ObjProto_toString%': ['Object', 'prototype', 'toString'],
'%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'],
'%PromisePrototype%': ['Promise', 'prototype'],
'%PromiseProto_then%': ['Promise', 'prototype', 'then'],
'%Promise_all%': ['Promise', 'all'],
'%Promise_reject%': ['Promise', 'reject'],
'%Promise_resolve%': ['Promise', 'resolve'],
'%RangeErrorPrototype%': ['RangeError', 'prototype'],
'%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'],
'%RegExpPrototype%': ['RegExp', 'prototype'],
'%SetPrototype%': ['Set', 'prototype'],
'%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'],
'%StringPrototype%': ['String', 'prototype'],
'%SymbolPrototype%': ['Symbol', 'prototype'],
'%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'],
'%TypedArrayPrototype%': ['TypedArray', 'prototype'],
'%TypeErrorPrototype%': ['TypeError', 'prototype'],
'%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'],
'%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'],
'%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'],
'%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'],
'%URIErrorPrototype%': ['URIError', 'prototype'],
'%WeakMapPrototype%': ['WeakMap', 'prototype'],
'%WeakSetPrototype%': ['WeakSet', 'prototype']
};
var bind = __nccwpck_require__(8334);
var hasOwn = __nccwpck_require__(6339);
var $concat = bind.call(Function.call, Array.prototype.concat);
var $spliceApply = bind.call(Function.apply, Array.prototype.splice);
var $replace = bind.call(Function.call, String.prototype.replace);
var $strSlice = bind.call(Function.call, String.prototype.slice);
/* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */
var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g;
var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */
var stringToPath = function stringToPath(string) {
var first = $strSlice(string, 0, 1);
var last = $strSlice(string, -1);
if (first === '%' && last !== '%') {
throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`');
} else if (last === '%' && first !== '%') {
throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`');
}
var result = [];
$replace(string, rePropName, function (match, number, quote, subString) {
result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match;
});
return result;
};
/* end adaptation */
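// Resolves a top-level intrinsic name (e.g. '%Array%'), following legacy
// aliases and lazily evaluating intrinsics that require eval to construct.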
var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) {
var intrinsicName = name;
var alias;
if (hasOwn(LEGACY_ALIASES, intrinsicName)) {
alias = LEGACY_ALIASES[intrinsicName];
intrinsicName = '%' + alias[0] + '%';
}
if (hasOwn(INTRINSICS, intrinsicName)) {
var value = INTRINSICS[intrinsicName];
if (value === needsEval) {
value = doEval(intrinsicName);
}
if (typeof value === 'undefined' && !allowMissing) {
throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!');
}
return {
alias: alias,
name: intrinsicName,
value: value
};
}
throw new $SyntaxError('intrinsic ' + name + ' does not exist!');
};
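// Walks the remaining property path off the base intrinsic, caching resolved
// values in INTRINSICS where it is safe to do so.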
module.exports = function GetIntrinsic(name, allowMissing) {
if (typeof name !== 'string' || name.length === 0) {
throw new $TypeError('intrinsic name must be a non-empty string');
}
if (arguments.length > 1 && typeof allowMissing !== 'boolean') {
throw new $TypeError('"allowMissing" argument must be a boolean');
}
var parts = stringToPath(name);
var intrinsicBaseName = parts.length > 0 ? parts[0] : '';
var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing);
var intrinsicRealName = intrinsic.name;
var value = intrinsic.value;
var skipFurtherCaching = false;
var alias = intrinsic.alias;
if (alias) {
intrinsicBaseName = alias[0];
$spliceApply(parts, $concat([0, 1], alias));
}
for (var i = 1, isOwn = true; i < parts.length; i += 1) {
var part = parts[i];
var first = $strSlice(part, 0, 1);
var last = $strSlice(part, -1);
if (
(
(first === '"' || first === "'" || first === '`')
|| (last === '"' || last === "'" || last === '`')
)
&& first !== last
) {
throw new $SyntaxError('property names with quotes must have matching quotes');
}
if (part === 'constructor' || !isOwn) {
skipFurtherCaching = true;
}
intrinsicBaseName += '.' + part;
intrinsicRealName = '%' + intrinsicBaseName + '%';
if (hasOwn(INTRINSICS, intrinsicRealName)) {
value = INTRINSICS[intrinsicRealName];
} else if (value != null) {
if (!(part in value)) {
if (!allowMissing) {
throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.');
}
return void undefined;
}
if ($gOPD && (i + 1) >= parts.length) {
var desc = $gOPD(value, part);
isOwn = !!desc;
// By convention, when a data property is converted to an accessor
// property to emulate a data property that does not suffer from
// the override mistake, that accessor's getter is marked with
// an `originalValue` property. Here, when we detect this, we
// uphold the illusion by pretending to see that original data
// property, i.e., returning the value rather than the getter
// itself.
if (isOwn && 'get' in desc && !('originalValue' in desc.get)) {
value = desc.get;
} else {
value = value[part];
}
} else {
isOwn = hasOwn(value, part);
value = value[part];
}
if (isOwn && !skipFurtherCaching) {
INTRINSICS[intrinsicRealName] = value;
}
}
}
return value;
};
/***/ }),
/***/ 587:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var origSymbol = typeof Symbol !== 'undefined' && Symbol;
var hasSymbolSham = __nccwpck_require__(7747);
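// Detects native Symbol support, using the behavioural checks in
// `hasSymbolSham` to rule out polyfills.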
module.exports = function hasNativeSymbols() {
if (typeof origSymbol !== 'function') { return false; }
if (typeof Symbol !== 'function') { return false; }
if (typeof origSymbol('foo') !== 'symbol') { return false; }
if (typeof Symbol('bar') !== 'symbol') { return false; }
return hasSymbolSham();
};
/***/ }),
/***/ 7747:
/***/ ((module) => {
"use strict";
/* eslint complexity: [2, 18], max-statements: [2, 33] */
module.exports = function hasSymbols() {
if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; }
if (typeof Symbol.iterator === 'symbol') { return true; }
var obj = {};
var sym = Symbol('test');
var symObj = Object(sym);
if (typeof sym === 'string') { return false; }
if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; }
if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; }
// temp disabled per https://github.com/ljharb/object.assign/issues/17
// if (sym instanceof Symbol) { return false; }
// temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4
// if (!(symObj instanceof Symbol)) { return false; }
// if (typeof Symbol.prototype.toString !== 'function') { return false; }
// if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; }
var symVal = 42;
obj[sym] = symVal;
for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop
if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; }
if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; }
var syms = Object.getOwnPropertySymbols(obj);
if (syms.length !== 1 || syms[0] !== sym) { return false; }
if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; }
if (typeof Object.getOwnPropertyDescriptor === 'function') {
var descriptor = Object.getOwnPropertyDescriptor(obj, sym);
if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; }
}
return true;
};
/***/ }),
/***/ 6339:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var bind = __nccwpck_require__(8334);
module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty);
/***/ }),
/***/ 3287:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
/*!
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
function isObject(o) {
return Object.prototype.toString.call(o) === '[object Object]';
}
function isPlainObject(o) {
var ctor,prot;
if (isObject(o) === false) return false;
// If has modified constructor
ctor = o.constructor;
if (ctor === undefined) return true;
// If has modified prototype
prot = ctor.prototype;
if (isObject(prot) === false) return false;
// If constructor does not have an Object-specific method
if (prot.hasOwnProperty('isPrototypeOf') === false) {
return false;
}
// Most likely a plain Object
return true;
}
exports.isPlainObject = isPlainObject;
/***/ }),
/***/ 467:
/***/ ((module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var Stream = _interopDefault(__nccwpck_require__(2781));
var http = _interopDefault(__nccwpck_require__(3685));
var Url = _interopDefault(__nccwpck_require__(7310));
var whatwgUrl = _interopDefault(__nccwpck_require__(3323));
var https = _interopDefault(__nccwpck_require__(5687));
var zlib = _interopDefault(__nccwpck_require__(9796));
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
// fix for "Readable" isn't a named export issue
const Readable = Stream.Readable;
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');
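// Minimal Blob implementation backed by a Node.js Buffer, used by the bundled
// node-fetch Body implementation.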
class Blob {
constructor() {
this[TYPE] = '';
const blobParts = arguments[0];
const options = arguments[1];
const buffers = [];
let size = 0;
if (blobParts) {
const a = blobParts;
const length = Number(a.length);
for (let i = 0; i < length; i++) {
const element = a[i];
let buffer;
if (element instanceof Buffer) {
buffer = element;
} else if (ArrayBuffer.isView(element)) {
buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
} else if (element instanceof ArrayBuffer) {
buffer = Buffer.from(element);
} else if (element instanceof Blob) {
buffer = element[BUFFER];
} else {
buffer = Buffer.from(typeof element === 'string' ? element : String(element));
}
size += buffer.length;
buffers.push(buffer);
}
}
this[BUFFER] = Buffer.concat(buffers);
let type = options && options.type !== undefined && String(options.type).toLowerCase();
if (type && !/[^\u0020-\u007E]/.test(type)) {
this[TYPE] = type;
}
}
get size() {
return this[BUFFER].length;
}
get type() {
return this[TYPE];
}
text() {
return Promise.resolve(this[BUFFER].toString());
}
arrayBuffer() {
const buf = this[BUFFER];
const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
return Promise.resolve(ab);
}
stream() {
const readable = new Readable();
readable._read = function () {};
readable.push(this[BUFFER]);
readable.push(null);
return readable;
}
toString() {
return '[object Blob]';
}
slice() {
const size = this.size;
const start = arguments[0];
const end = arguments[1];
let relativeStart, relativeEnd;
if (start === undefined) {
relativeStart = 0;
} else if (start < 0) {
relativeStart = Math.max(size + start, 0);
} else {
relativeStart = Math.min(start, size);
}
if (end === undefined) {
relativeEnd = size;
} else if (end < 0) {
relativeEnd = Math.max(size + end, 0);
} else {
relativeEnd = Math.min(end, size);
}
const span = Math.max(relativeEnd - relativeStart, 0);
const buffer = this[BUFFER];
const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
const blob = new Blob([], { type: arguments[2] });
blob[BUFFER] = slicedBuffer;
return blob;
}
}
Object.defineProperties(Blob.prototype, {
size: { enumerable: true },
type: { enumerable: true },
slice: { enumerable: true }
});
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
value: 'Blob',
writable: false,
enumerable: false,
configurable: true
});
/**
* fetch-error.js
*
* FetchError interface for operational errors
*/
/**
* Create FetchError instance
*
* @param String message Error message for human
* @param String type Error type for machine
* @param String systemError For Node.js system error
* @return FetchError
*/
function FetchError(message, type, systemError) {
Error.call(this, message);
this.message = message;
this.type = type;
// when err.type is `system`, err.code contains system error code
if (systemError) {
this.code = this.errno = systemError.code;
}
// hide custom error implementation details from end-users
Error.captureStackTrace(this, this.constructor);
}
FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
let convert;
try {
convert = (__nccwpck_require__(2877).convert);
} catch (e) {}
const INTERNALS = Symbol('Body internals');
// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
/**
* Body mixin
*
* Ref: https://fetch.spec.whatwg.org/#body
*
* @param Stream body Readable stream
* @param Object opts Response options
* @return Void
*/
function Body(body) {
var _this = this;
var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref$size = _ref.size;
let size = _ref$size === undefined ? 0 : _ref$size;
var _ref$timeout = _ref.timeout;
let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
if (body == null) {
// body is undefined or null
body = null;
} else if (isURLSearchParams(body)) {
// body is a URLSearchParams
body = Buffer.from(body.toString());
} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
// body is ArrayBuffer
body = Buffer.from(body);
} else if (ArrayBuffer.isView(body)) {
// body is ArrayBufferView
body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
} else if (body instanceof Stream) ; else {
// none of the above
// coerce to string then buffer
body = Buffer.from(String(body));
}
this[INTERNALS] = {
body,
disturbed: false,
error: null
};
this.size = size;
this.timeout = timeout;
if (body instanceof Stream) {
body.on('error', function (err) {
const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
_this[INTERNALS].error = error;
});
}
}
Body.prototype = {
get body() {
return this[INTERNALS].body;
},
get bodyUsed() {
return this[INTERNALS].disturbed;
},
/**
* Decode response as ArrayBuffer
*
* @return Promise
*/
arrayBuffer() {
return consumeBody.call(this).then(function (buf) {
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
});
},
/**
* Return raw response as Blob
*
* @return Promise
*/
blob() {
let ct = this.headers && this.headers.get('content-type') || '';
return consumeBody.call(this).then(function (buf) {
return Object.assign(
// Prevent copying
new Blob([], {
type: ct.toLowerCase()
}), {
[BUFFER]: buf
});
});
},
/**
* Decode response as json
*
* @return Promise
*/
json() {
var _this2 = this;
return consumeBody.call(this).then(function (buffer) {
try {
return JSON.parse(buffer.toString());
} catch (err) {
return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
}
});
},
/**
* Decode response as text
*
* @return Promise
*/
text() {
return consumeBody.call(this).then(function (buffer) {
return buffer.toString();
});
},
/**
* Decode response as buffer (non-spec api)
*
* @return Promise
*/
buffer() {
return consumeBody.call(this);
},
/**
* Decode response as text, while automatically detecting the encoding and
* trying to decode to UTF-8 (non-spec api)
*
* @return Promise
*/
textConverted() {
var _this3 = this;
return consumeBody.call(this).then(function (buffer) {
return convertBody(buffer, _this3.headers);
});
}
};
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
body: { enumerable: true },
bodyUsed: { enumerable: true },
arrayBuffer: { enumerable: true },
blob: { enumerable: true },
json: { enumerable: true },
text: { enumerable: true }
});
Body.mixIn = function (proto) {
for (const name of Object.getOwnPropertyNames(Body.prototype)) {
// istanbul ignore else: future proof
if (!(name in proto)) {
const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
Object.defineProperty(proto, name, desc);
}
}
};
/**
* Consume and convert an entire Body to a Buffer.
*
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
*
* @return Promise
*/
function consumeBody() {
var _this4 = this;
if (this[INTERNALS].disturbed) {
return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
}
this[INTERNALS].disturbed = true;
if (this[INTERNALS].error) {
return Body.Promise.reject(this[INTERNALS].error);
}
let body = this.body;
// body is null
if (body === null) {
return Body.Promise.resolve(Buffer.alloc(0));
}
// body is blob
if (isBlob(body)) {
body = body.stream();
}
// body is buffer
if (Buffer.isBuffer(body)) {
return Body.Promise.resolve(body);
}
// istanbul ignore if: should never happen
if (!(body instanceof Stream)) {
return Body.Promise.resolve(Buffer.alloc(0));
}
// body is stream
// get ready to actually consume the body
let accum = [];
let accumBytes = 0;
let abort = false;
return new Body.Promise(function (resolve, reject) {
let resTimeout;
// allow timeout on slow response body
if (_this4.timeout) {
resTimeout = setTimeout(function () {
abort = true;
reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
}, _this4.timeout);
}
// handle stream errors
body.on('error', function (err) {
if (err.name === 'AbortError') {
// if the request was aborted, reject with this Error
abort = true;
reject(err);
} else {
// other errors, such as incorrect content-encoding
reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
}
});
body.on('data', function (chunk) {
if (abort || chunk === null) {
return;
}
if (_this4.size && accumBytes + chunk.length > _this4.size) {
abort = true;
reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
return;
}
accumBytes += chunk.length;
accum.push(chunk);
});
body.on('end', function () {
if (abort) {
return;
}
clearTimeout(resTimeout);
try {
resolve(Buffer.concat(accum, accumBytes));
} catch (err) {
// handle streams that have accumulated too much data (issue #414)
reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
}
});
});
}
/**
* Detect buffer encoding and convert to target encoding
* ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
*
* @param Buffer buffer Incoming buffer
* @param String encoding Target encoding
* @return String
*/
function convertBody(buffer, headers) {
if (typeof convert !== 'function') {
throw new Error('The package `encoding` must be installed to use the textConverted() function');
}
const ct = headers.get('content-type');
let charset = 'utf-8';
let res, str;
// header
if (ct) {
res = /charset=([^;]*)/i.exec(ct);
}
// no charset in content type, peek at response body for at most 1024 bytes
str = buffer.slice(0, 1024).toString();
// html5
if (!res && str) {
res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
}
// html4
if (!res && str) {
res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
if (!res) {
res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
if (res) {
res.pop(); // drop last quote
}
}
if (res) {
res = /charset=(.*)/i.exec(res.pop());
}
}
// xml
if (!res && str) {
res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
}
// found charset
if (res) {
charset = res.pop();
// prevent decode issues when sites use incorrect encoding
// ref: https://hsivonen.fi/encoding-menu/
if (charset === 'gb2312' || charset === 'gbk') {
charset = 'gb18030';
}
}
// turn raw buffers into a single utf-8 buffer
return convert(buffer, 'UTF-8', charset).toString();
}
/**
* Detect a URLSearchParams object
* ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
*
* @param Object obj Object to detect by type or brand
* @return String
*/
function isURLSearchParams(obj) {
// Duck-typing as a necessary condition.
if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
return false;
}
// Brand-checking and more duck-typing as optional condition.
return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}
/**
* Check if `obj` is a W3C `Blob` object (which `File` inherits from)
* @param {*} obj
* @return {boolean}
*/
function isBlob(obj) {
return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
}
/**
* Clone body given Res/Req instance
*
* @param Mixed instance Response or Request instance
* @return Mixed
*/
function clone(instance) {
let p1, p2;
let body = instance.body;
// don't allow cloning a used body
if (instance.bodyUsed) {
throw new Error('cannot clone body after it is used');
}
// check that body is a stream and not form-data object
// note: we can't clone the form-data object without having it as a dependency
if (body instanceof Stream && typeof body.getBoundary !== 'function') {
// tee instance body
p1 = new PassThrough();
p2 = new PassThrough();
body.pipe(p1);
body.pipe(p2);
// set instance body to teed body and return the other teed body
instance[INTERNALS].body = p1;
body = p2;
}
return body;
}
/**
* Performs the operation "extract a `Content-Type` value from |object|" as
* specified in the specification:
* https://fetch.spec.whatwg.org/#concept-bodyinit-extract
*
* This function assumes that instance.body is present.
*
* @param Mixed instance Any options.body input
*/
function extractContentType(body) {
if (body === null) {
// body is null
return null;
} else if (typeof body === 'string') {
// body is string
return 'text/plain;charset=UTF-8';
} else if (isURLSearchParams(body)) {
// body is a URLSearchParams
return 'application/x-www-form-urlencoded;charset=UTF-8';
} else if (isBlob(body)) {
// body is blob
return body.type || null;
} else if (Buffer.isBuffer(body)) {
// body is buffer
return null;
} else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
// body is ArrayBuffer
return null;
} else if (ArrayBuffer.isView(body)) {
// body is ArrayBufferView
return null;
} else if (typeof body.getBoundary === 'function') {
// detect form data input from form-data module
return `multipart/form-data;boundary=${body.getBoundary()}`;
} else if (body instanceof Stream) {
// body is stream
// can't really do much about this
return null;
} else {
// Body constructor defaults other things to string
return 'text/plain;charset=UTF-8';
}
}
/**
* The Fetch Standard treats this as if "total bytes" is a property on the body.
* For us, we have to explicitly get it with a function.
*
* ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
*
* @param Body instance Instance of Body
* @return Number? Number of bytes, or null if not possible
*/
function getTotalBytes(instance) {
const body = instance.body;
if (body === null) {
// body is null
return 0;
} else if (isBlob(body)) {
return body.size;
} else if (Buffer.isBuffer(body)) {
// body is buffer
return body.length;
} else if (body && typeof body.getLengthSync === 'function') {
// detect form data input from form-data module
if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
body.hasKnownLength && body.hasKnownLength()) {
// 2.x
return body.getLengthSync();
}
return null;
} else {
// body is stream
return null;
}
}
/**
* Write a Body to a Node.js WritableStream (e.g. http.Request) object.
*
* @param Body instance Instance of Body
* @return Void
*/
function writeToStream(dest, instance) {
const body = instance.body;
if (body === null) {
// body is null
dest.end();
} else if (isBlob(body)) {
body.stream().pipe(dest);
} else if (Buffer.isBuffer(body)) {
// body is buffer
dest.write(body);
dest.end();
} else {
// body is stream
body.pipe(dest);
}
}
// expose Promise
Body.Promise = global.Promise;
/**
* headers.js
*
* Headers class offers convenient helpers
*/
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
function validateName(name) {
name = `${name}`;
if (invalidTokenRegex.test(name) || name === '') {
throw new TypeError(`${name} is not a legal HTTP header name`);
}
}
function validateValue(value) {
value = `${value}`;
if (invalidHeaderCharRegex.test(value)) {
throw new TypeError(`${value} is not a legal HTTP header value`);
}
}
/**
* Find the key in the map object given a header name.
*
* Returns undefined if not found.
*
* @param String name Header name
* @return String|Undefined
*/
function find(map, name) {
name = name.toLowerCase();
for (const key in map) {
if (key.toLowerCase() === name) {
return key;
}
}
return undefined;
}
const MAP = Symbol('map');
class Headers {
/**
* Headers class
*
* @param Object headers Response headers
* @return Void
*/
constructor() {
let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
this[MAP] = Object.create(null);
if (init instanceof Headers) {
const rawHeaders = init.raw();
const headerNames = Object.keys(rawHeaders);
for (const headerName of headerNames) {
for (const value of rawHeaders[headerName]) {
this.append(headerName, value);
}
}
return;
}
// We don't worry about converting prop to ByteString here as append()
// will handle it.
if (init == null) ; else if (typeof init === 'object') {
const method = init[Symbol.iterator];
if (method != null) {
if (typeof method !== 'function') {
throw new TypeError('Header pairs must be iterable');
}
// sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
const pairs = [];
for (const pair of init) {
if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
throw new TypeError('Each header pair must be iterable');
}
pairs.push(Array.from(pair));
}
for (const pair of pairs) {
if (pair.length !== 2) {
throw new TypeError('Each header pair must be a name/value tuple');
}
this.append(pair[0], pair[1]);
}
} else {
// record<ByteString, ByteString>
for (const key of Object.keys(init)) {
const value = init[key];
this.append(key, value);
}
}
} else {
throw new TypeError('Provided initializer must be an object');
}
}
/**
* Return combined header value given name
*
* @param String name Header name
* @return Mixed
*/
get(name) {
name = `${name}`;
validateName(name);
const key = find(this[MAP], name);
if (key === undefined) {
return null;
}
return this[MAP][key].join(', ');
}
/**
* Iterate over all headers
*
* @param Function callback Executed for each item with parameters (value, name, thisArg)
* @param Boolean thisArg `this` context for callback function
* @return Void
*/
forEach(callback) {
let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
let pairs = getHeaders(this);
let i = 0;
while (i < pairs.length) {
var _pairs$i = pairs[i];
const name = _pairs$i[0],
value = _pairs$i[1];
callback.call(thisArg, value, name, this);
pairs = getHeaders(this);
i++;
}
}
/**
* Overwrite header values given name
*
* @param String name Header name
* @param String value Header value
* @return Void
*/
set(name, value) {
name = `${name}`;
value = `${value}`;
validateName(name);
validateValue(value);
const key = find(this[MAP], name);
this[MAP][key !== undefined ? key : name] = [value];
}
/**
* Append a value onto existing header
*
* @param String name Header name
* @param String value Header value
* @return Void
*/
append(name, value) {
name = `${name}`;
value = `${value}`;
validateName(name);
validateValue(value);
const key = find(this[MAP], name);
if (key !== undefined) {
this[MAP][key].push(value);
} else {
this[MAP][name] = [value];
}
}
/**
* Check for header name existence
*
* @param String name Header name
* @return Boolean
*/
has(name) {
name = `${name}`;
validateName(name);
return find(this[MAP], name) !== undefined;
}
/**
* Delete all header values given name
*
* @param String name Header name
* @return Void
*/
delete(name) {
name = `${name}`;
validateName(name);
const key = find(this[MAP], name);
if (key !== undefined) {
delete this[MAP][key];
}
}
/**
* Return raw headers (non-spec api)
*
* @return Object
*/
raw() {
return this[MAP];
}
/**
* Get an iterator on keys.
*
* @return Iterator
*/
keys() {
return createHeadersIterator(this, 'key');
}
/**
* Get an iterator on values.
*
* @return Iterator
*/
values() {
return createHeadersIterator(this, 'value');
}
/**
* Get an iterator on entries.
*
* This is the default iterator of the Headers object.
*
* @return Iterator
*/
[Symbol.iterator]() {
return createHeadersIterator(this, 'key+value');
}
}
Headers.prototype.entries = Headers.prototype[Symbol.iterator];
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
value: 'Headers',
writable: false,
enumerable: false,
configurable: true
});
Object.defineProperties(Headers.prototype, {
get: { enumerable: true },
forEach: { enumerable: true },
set: { enumerable: true },
append: { enumerable: true },
has: { enumerable: true },
delete: { enumerable: true },
keys: { enumerable: true },
values: { enumerable: true },
entries: { enumerable: true }
});
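// Flattens the internal header map into sorted pairs (or just keys/values),
// joining repeated header values with ', '.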
function getHeaders(headers) {
let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
const keys = Object.keys(headers[MAP]).sort();
return keys.map(kind === 'key' ? function (k) {
return k.toLowerCase();
} : kind === 'value' ? function (k) {
return headers[MAP][k].join(', ');
} : function (k) {
return [k.toLowerCase(), headers[MAP][k].join(', ')];
});
}
const INTERNAL = Symbol('internal');
function createHeadersIterator(target, kind) {
const iterator = Object.create(HeadersIteratorPrototype);
iterator[INTERNAL] = {
target,
kind,
index: 0
};
return iterator;
}
const HeadersIteratorPrototype = Object.setPrototypeOf({
next() {
// istanbul ignore if
if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
throw new TypeError('Value of `this` is not a HeadersIterator');
}
var _INTERNAL = this[INTERNAL];
const target = _INTERNAL.target,
kind = _INTERNAL.kind,
index = _INTERNAL.index;
const values = getHeaders(target, kind);
const len = values.length;
if (index >= len) {
return {
value: undefined,
done: true
};
}
this[INTERNAL].index = index + 1;
return {
value: values[index],
done: false
};
}
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
value: 'HeadersIterator',
writable: false,
enumerable: false,
configurable: true
});
/**
* Export the Headers object in a form that Node.js can consume.
*
* @param Headers headers
* @return Object
*/
function exportNodeCompatibleHeaders(headers) {
const obj = Object.assign({ __proto__: null }, headers[MAP]);
// http.request() only supports string as Host header. This hack makes
// specifying custom Host header possible.
const hostHeaderKey = find(headers[MAP], 'Host');
if (hostHeaderKey !== undefined) {
obj[hostHeaderKey] = obj[hostHeaderKey][0];
}
return obj;
}
/**
* Create a Headers object from an object of headers, ignoring those that do
* not conform to HTTP grammar productions.
*
* @param Object obj Object of headers
* @return Headers
*/
function createHeadersLenient(obj) {
const headers = new Headers();
for (const name of Object.keys(obj)) {
if (invalidTokenRegex.test(name)) {
continue;
}
if (Array.isArray(obj[name])) {
for (const val of obj[name]) {
if (invalidHeaderCharRegex.test(val)) {
continue;
}
if (headers[MAP][name] === undefined) {
headers[MAP][name] = [val];
} else {
headers[MAP][name].push(val);
}
}
} else if (!invalidHeaderCharRegex.test(obj[name])) {
headers[MAP][name] = [obj[name]];
}
}
return headers;
}
const INTERNALS$1 = Symbol('Response internals');
// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
/**
* Response class
*
* @param Stream body Readable stream
* @param Object opts Response options
* @return Void
*/
class Response {
constructor() {
let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
Body.call(this, body, opts);
const status = opts.status || 200;
const headers = new Headers(opts.headers);
if (body != null && !headers.has('Content-Type')) {
const contentType = extractContentType(body);
if (contentType) {
headers.append('Content-Type', contentType);
}
}
this[INTERNALS$1] = {
url: opts.url,
status,
statusText: opts.statusText || STATUS_CODES[status],
headers,
counter: opts.counter
};
}
get url() {
return this[INTERNALS$1].url || '';
}
get status() {
return this[INTERNALS$1].status;
}
/**
* Convenience property representing if the request ended normally
*/
get ok() {
return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
}
get redirected() {
return this[INTERNALS$1].counter > 0;
}
get statusText() {
return this[INTERNALS$1].statusText;
}
get headers() {
return this[INTERNALS$1].headers;
}
/**
* Clone this response
*
* @return Response
*/
clone() {
return new Response(clone(this), {
url: this.url,
status: this.status,
statusText: this.statusText,
headers: this.headers,
ok: this.ok,
redirected: this.redirected
});
}
}
Body.mixIn(Response.prototype);
Object.defineProperties(Response.prototype, {
url: { enumerable: true },
status: { enumerable: true },
ok: { enumerable: true },
redirected: { enumerable: true },
statusText: { enumerable: true },
headers: { enumerable: true },
clone: { enumerable: true }
});
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
value: 'Response',
writable: false,
enumerable: false,
configurable: true
});
const INTERNALS$2 = Symbol('Request internals');
const URL = Url.URL || whatwgUrl.URL;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;
/**
* Wrapper around `new URL` to handle arbitrary URLs
*
* @param {string} urlStr
* @return {void}
*/
function parseURL(urlStr) {
/*
Check whether the URL is absolute or not
Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
*/
if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
urlStr = new URL(urlStr).toString();
}
// Fallback to old implementation for arbitrary URLs
return parse_url(urlStr);
}
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
/**
* Check if a value is an instance of Request.
*
* @param Mixed input
* @return Boolean
*/
function isRequest(input) {
return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
}
function isAbortSignal(signal) {
const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
return !!(proto && proto.constructor.name === 'AbortSignal');
}
/**
* Request class
*
* @param Mixed input Url or Request instance
* @param Object init Custom options
* @return Void
*/
class Request {
constructor(input) {
let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
let parsedURL;
// normalize input
if (!isRequest(input)) {
if (input && input.href) {
// in order to support Node.js' Url objects; though WHATWG's URL objects
// will fall into this branch also (since their `toString()` will return
// `href` property anyway)
parsedURL = parseURL(input.href);
} else {
// coerce input to a string before attempting to parse
parsedURL = parseURL(`${input}`);
}
input = {};
} else {
parsedURL = parseURL(input.url);
}
let method = init.method || input.method || 'GET';
method = method.toUpperCase();
if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
throw new TypeError('Request with GET/HEAD method cannot have body');
}
let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
Body.call(this, inputBody, {
timeout: init.timeout || input.timeout || 0,
size: init.size || input.size || 0
});
const headers = new Headers(init.headers || input.headers || {});
if (inputBody != null && !headers.has('Content-Type')) {
const contentType = extractContentType(inputBody);
if (contentType) {
headers.append('Content-Type', contentType);
}
}
let signal = isRequest(input) ? input.signal : null;
if ('signal' in init) signal = init.signal;
if (signal != null && !isAbortSignal(signal)) {
throw new TypeError('Expected signal to be an instanceof AbortSignal');
}
this[INTERNALS$2] = {
method,
redirect: init.redirect || input.redirect || 'follow',
headers,
parsedURL,
signal
};
// node-fetch-only options
this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
this.counter = init.counter || input.counter || 0;
this.agent = init.agent || input.agent;
}
get method() {
return this[INTERNALS$2].method;
}
get url() {
return format_url(this[INTERNALS$2].parsedURL);
}
get headers() {
return this[INTERNALS$2].headers;
}
get redirect() {
return this[INTERNALS$2].redirect;
}
get signal() {
return this[INTERNALS$2].signal;
}
/**
* Clone this request
*
* @return Request
*/
clone() {
return new Request(this);
}
}
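// Illustrative sketch (comment only, not executed): constructing and cloning a Request
// with the class defined above. The URL and header values are placeholders.
/*
const req = new Request('https://example.com/data', {
  method: 'POST',
  body: JSON.stringify({ hello: 'world' }),
  headers: { 'Content-Type': 'application/json' }
});
req.method;                        // 'POST'
req.url;                           // 'https://example.com/data'
req.headers.get('Content-Type');   // 'application/json'
req.clone();                       // independent Request with the same settings
// new Request('https://example.com', { body: 'x' }) would throw, because a GET/HEAD
// request cannot carry a body (see the constructor check above).
*/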
Body.mixIn(Request.prototype);
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
value: 'Request',
writable: false,
enumerable: false,
configurable: true
});
Object.defineProperties(Request.prototype, {
method: { enumerable: true },
url: { enumerable: true },
headers: { enumerable: true },
redirect: { enumerable: true },
clone: { enumerable: true },
signal: { enumerable: true }
});
/**
* Convert a Request to Node.js http request options.
*
* @param Request A Request instance
* @return Object The options object to be passed to http.request
*/
function getNodeRequestOptions(request) {
const parsedURL = request[INTERNALS$2].parsedURL;
const headers = new Headers(request[INTERNALS$2].headers);
// fetch step 1.3
if (!headers.has('Accept')) {
headers.set('Accept', '*/*');
}
// Basic fetch
if (!parsedURL.protocol || !parsedURL.hostname) {
throw new TypeError('Only absolute URLs are supported');
}
if (!/^https?:$/.test(parsedURL.protocol)) {
throw new TypeError('Only HTTP(S) protocols are supported');
}
if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
}
// HTTP-network-or-cache fetch steps 2.4-2.7
let contentLengthValue = null;
if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
contentLengthValue = '0';
}
if (request.body != null) {
const totalBytes = getTotalBytes(request);
if (typeof totalBytes === 'number') {
contentLengthValue = String(totalBytes);
}
}
if (contentLengthValue) {
headers.set('Content-Length', contentLengthValue);
}
// HTTP-network-or-cache fetch step 2.11
if (!headers.has('User-Agent')) {
headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
}
// HTTP-network-or-cache fetch step 2.15
if (request.compress && !headers.has('Accept-Encoding')) {
headers.set('Accept-Encoding', 'gzip,deflate');
}
let agent = request.agent;
if (typeof agent === 'function') {
agent = agent(parsedURL);
}
if (!headers.has('Connection') && !agent) {
headers.set('Connection', 'close');
}
// HTTP-network fetch step 4.2
// chunked encoding is handled by Node.js
return Object.assign({}, parsedURL, {
method: request.method,
headers: exportNodeCompatibleHeaders(headers),
agent
});
}
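// Illustrative sketch (comment only): the kind of options object the function above
// produces for a plain GET request. The exact header layout comes from
// exportNodeCompatibleHeaders (defined elsewhere in this bundle), so treat the shape
// as approximate.
/*
const opts = getNodeRequestOptions(new Request('https://example.com/'));
// opts.protocol === 'https:', opts.hostname === 'example.com', opts.method === 'GET'
// opts.headers now carries the defaults added above: a wildcard Accept,
// Accept-Encoding 'gzip,deflate', Connection 'close' (since no agent was given)
// and the node-fetch User-Agent string.
*/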
/**
* abort-error.js
*
* AbortError interface for cancelled requests
*/
/**
* Create AbortError instance
*
 * @param String message Error message for humans
* @return AbortError
*/
function AbortError(message) {
Error.call(this, message);
this.type = 'aborted';
this.message = message;
// hide custom error implementation details from end-users
Error.captureStackTrace(this, this.constructor);
}
AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
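// Illustrative sketch (comment only): how AbortError surfaces to callers. Assumes an
// AbortController implementation is available (global in newer Node versions, or a
// compatible polyfill); the URL is a placeholder.
/*
const controller = new AbortController();
fetch('https://example.com/slow', { signal: controller.signal }).catch(err => {
  // err.name === 'AbortError', err.type === 'aborted'
});
controller.abort();
*/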
const URL$1 = Url.URL || whatwgUrl.URL;
// fix an issue where "PassThrough" and "resolve" aren't named exports for node < 10
const PassThrough$1 = Stream.PassThrough;
const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
const orig = new URL$1(original).hostname;
const dest = new URL$1(destination).hostname;
return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
};
/**
* isSameProtocol reports whether the two provided URLs use the same protocol.
*
* Both domains must already be in canonical form.
* @param {string|URL} original
* @param {string|URL} destination
*/
const isSameProtocol = function isSameProtocol(destination, original) {
const orig = new URL$1(original).protocol;
const dest = new URL$1(destination).protocol;
return orig === dest;
};
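// Illustrative sketch (comment only): how the two redirect-safety helpers above are
// used when deciding whether to keep sensitive headers. Hostnames are placeholders.
/*
isDomainOrSubdomain('https://example.com/login', 'https://api.example.com/cb'); // true
isDomainOrSubdomain('https://example.com/login', 'https://evil.test/cb');       // false
isSameProtocol('https://example.com/', 'http://example.com/');                  // false
// fetch() below drops authorization/cookie headers when either check fails.
*/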
/**
* Fetch function
*
* @param Mixed url Absolute url or Request instance
* @param Object opts Fetch options
* @return Promise
*/
function fetch(url, opts) {
// allow custom promise
if (!fetch.Promise) {
throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
}
Body.Promise = fetch.Promise;
// wrap http.request into fetch
return new fetch.Promise(function (resolve, reject) {
// build request object
const request = new Request(url, opts);
const options = getNodeRequestOptions(request);
const send = (options.protocol === 'https:' ? https : http).request;
const signal = request.signal;
let response = null;
const abort = function abort() {
let error = new AbortError('The user aborted a request.');
reject(error);
if (request.body && request.body instanceof Stream.Readable) {
destroyStream(request.body, error);
}
if (!response || !response.body) return;
response.body.emit('error', error);
};
if (signal && signal.aborted) {
abort();
return;
}
const abortAndFinalize = function abortAndFinalize() {
abort();
finalize();
};
// send request
const req = send(options);
let reqTimeout;
if (signal) {
signal.addEventListener('abort', abortAndFinalize);
}
function finalize() {
req.abort();
if (signal) signal.removeEventListener('abort', abortAndFinalize);
clearTimeout(reqTimeout);
}
if (request.timeout) {
req.once('socket', function (socket) {
reqTimeout = setTimeout(function () {
reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
finalize();
}, request.timeout);
});
}
req.on('error', function (err) {
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
if (response && response.body) {
destroyStream(response.body, err);
}
finalize();
});
fixResponseChunkedTransferBadEnding(req, function (err) {
if (signal && signal.aborted) {
return;
}
if (response && response.body) {
destroyStream(response.body, err);
}
});
/* c8 ignore next 18 */
if (parseInt(process.version.substring(1)) < 14) {
// Before Node.js 14, pipeline() does not fully support async iterators and does not always
// properly handle when the socket close/end events are out of order.
req.on('socket', function (s) {
s.addListener('close', function (hadError) {
// if a data listener is still present we didn't end cleanly
const hasDataListener = s.listenerCount('data') > 0;
// if end happened before close but the socket didn't emit an error, do it now
if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
const err = new Error('Premature close');
err.code = 'ERR_STREAM_PREMATURE_CLOSE';
response.body.emit('error', err);
}
});
});
}
req.on('response', function (res) {
clearTimeout(reqTimeout);
const headers = createHeadersLenient(res.headers);
// HTTP fetch step 5
if (fetch.isRedirect(res.statusCode)) {
// HTTP fetch step 5.2
const location = headers.get('Location');
// HTTP fetch step 5.3
let locationURL = null;
try {
locationURL = location === null ? null : new URL$1(location, request.url).toString();
} catch (err) {
// error here can only be invalid URL in Location: header
// do not throw when options.redirect == manual
// let the user extract the erroneous redirect URL
if (request.redirect !== 'manual') {
reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
finalize();
return;
}
}
// HTTP fetch step 5.5
switch (request.redirect) {
case 'error':
reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
finalize();
return;
case 'manual':
// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
if (locationURL !== null) {
// handle corrupted header
try {
headers.set('Location', locationURL);
} catch (err) {
// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
reject(err);
}
}
break;
case 'follow':
// HTTP-redirect fetch step 2
if (locationURL === null) {
break;
}
// HTTP-redirect fetch step 5
if (request.counter >= request.follow) {
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
const requestOpts = {
headers: new Headers(request.headers),
follow: request.follow,
counter: request.counter + 1,
agent: request.agent,
compress: request.compress,
method: request.method,
body: request.body,
signal: request.signal,
timeout: request.timeout,
size: request.size
};
if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
requestOpts.headers.delete(name);
}
}
// HTTP-redirect fetch step 9
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 11
if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
requestOpts.method = 'GET';
requestOpts.body = undefined;
requestOpts.headers.delete('content-length');
}
// HTTP-redirect fetch step 15
resolve(fetch(new Request(locationURL, requestOpts)));
finalize();
return;
}
}
// prepare response
res.once('end', function () {
if (signal) signal.removeEventListener('abort', abortAndFinalize);
});
let body = res.pipe(new PassThrough$1());
const response_options = {
url: request.url,
status: res.statusCode,
statusText: res.statusMessage,
headers: headers,
size: request.size,
timeout: request.timeout,
counter: request.counter
};
// HTTP-network fetch step 12.1.1.3
const codings = headers.get('Content-Encoding');
// HTTP-network fetch step 12.1.1.4: handle content codings
// in the following scenarios we ignore compression support
// 1. compression support is disabled
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
response = new Response(body, response_options);
resolve(response);
return;
}
// For Node v6+
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
const zlibOptions = {
flush: zlib.Z_SYNC_FLUSH,
finishFlush: zlib.Z_SYNC_FLUSH
};
// for gzip
if (codings == 'gzip' || codings == 'x-gzip') {
body = body.pipe(zlib.createGunzip(zlibOptions));
response = new Response(body, response_options);
resolve(response);
return;
}
// for deflate
if (codings == 'deflate' || codings == 'x-deflate') {
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
const raw = res.pipe(new PassThrough$1());
raw.once('data', function (chunk) {
// see http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
body = body.pipe(zlib.createInflate());
} else {
body = body.pipe(zlib.createInflateRaw());
}
response = new Response(body, response_options);
resolve(response);
});
raw.on('end', function () {
// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
if (!response) {
response = new Response(body, response_options);
resolve(response);
}
});
return;
}
// for br
if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
body = body.pipe(zlib.createBrotliDecompress());
response = new Response(body, response_options);
resolve(response);
return;
}
// otherwise, use response as-is
response = new Response(body, response_options);
resolve(response);
});
writeToStream(req, request);
});
}
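// Illustrative sketch (comment only): typical use of the fetch() wrapper above. The
// URL is a placeholder; res.json() comes from the Body mixin defined earlier in this
// bundle, and network failures or timeouts reject with a FetchError.
/*
fetch('https://api.example.com/resource', { timeout: 10000 })
  .then(res => {
    // res.ok / res.status / res.headers are the Response fields defined above
    return res.json();
  })
  .then(data => console.log(data))
  .catch(err => console.error(err.name, err.message));
*/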
function fixResponseChunkedTransferBadEnding(request, errorCallback) {
let socket;
request.on('socket', function (s) {
socket = s;
});
request.on('response', function (response) {
const headers = response.headers;
if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
response.once('close', function (hadError) {
// if a data listener is still present we didn't end cleanly
const hasDataListener = socket.listenerCount('data') > 0;
if (hasDataListener && !hadError) {
const err = new Error('Premature close');
err.code = 'ERR_STREAM_PREMATURE_CLOSE';
errorCallback(err);
}
});
}
});
}
function destroyStream(stream, err) {
if (stream.destroy) {
stream.destroy(err);
} else {
// node < 8
stream.emit('error', err);
stream.end();
}
}
/**
* Redirect code matching
*
* @param Number code Status code
* @return Boolean
*/
fetch.isRedirect = function (code) {
return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
};
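// Quick reference (comment only) for the redirect-status helper above:
/*
fetch.isRedirect(301); // true
fetch.isRedirect(308); // true
fetch.isRedirect(200); // false
fetch.isRedirect(304); // false (not a redirect status)
*/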
// expose Promise
fetch.Promise = global.Promise;
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports["default"] = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
/***/ }),
/***/ 2299:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var punycode = __nccwpck_require__(5477);
var mappingTable = __nccwpck_require__(1907);
var PROCESSING_OPTIONS = {
TRANSITIONAL: 0,
NONTRANSITIONAL: 1
};
function normalize(str) { // fix bug in v8
return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000');
}
function findStatus(val) {
var start = 0;
var end = mappingTable.length - 1;
while (start <= end) {
var mid = Math.floor((start + end) / 2);
var target = mappingTable[mid];
if (target[0][0] <= val && target[0][1] >= val) {
return target;
} else if (target[0][0] > val) {
end = mid - 1;
} else {
start = mid + 1;
}
}
return null;
}
var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
function countSymbols(string) {
return string
// replace every surrogate pair with a BMP symbol
.replace(regexAstralSymbols, '_')
// then get the length
.length;
}
function mapChars(domain_name, useSTD3, processing_option) {
var hasError = false;
var processed = "";
var len = countSymbols(domain_name);
for (var i = 0; i < len; ++i) {
var codePoint = domain_name.codePointAt(i);
var status = findStatus(codePoint);
switch (status[1]) {
case "disallowed":
hasError = true;
processed += String.fromCodePoint(codePoint);
break;
case "ignored":
break;
case "mapped":
processed += String.fromCodePoint.apply(String, status[2]);
break;
case "deviation":
if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {
processed += String.fromCodePoint.apply(String, status[2]);
} else {
processed += String.fromCodePoint(codePoint);
}
break;
case "valid":
processed += String.fromCodePoint(codePoint);
break;
case "disallowed_STD3_mapped":
if (useSTD3) {
hasError = true;
processed += String.fromCodePoint(codePoint);
} else {
processed += String.fromCodePoint.apply(String, status[2]);
}
break;
case "disallowed_STD3_valid":
if (useSTD3) {
hasError = true;
}
processed += String.fromCodePoint(codePoint);
break;
}
}
return {
string: processed,
error: hasError
};
}
var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/;
function validateLabel(label, processing_option) {
if (label.substr(0, 4) === "xn--") {
label = punycode.toUnicode(label);
processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
}
var error = false;
if (normalize(label) !== label ||
(label[2] === "-" && label[3] === "-") ||
label[0] === "-" || label[label.length - 1] === "-" ||
label.indexOf(".") !== -1 ||
label.search(combiningMarksRegex) === 0) {
error = true;
}
var len = countSymbols(label);
for (var i = 0; i < len; ++i) {
var status = findStatus(label.codePointAt(i));
if ((processing_option === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") ||
(processing_option === PROCESSING_OPTIONS.NONTRANSITIONAL &&
status[1] !== "valid" && status[1] !== "deviation")) {
error = true;
break;
}
}
return {
label: label,
error: error
};
}
function processing(domain_name, useSTD3, processing_option) {
var result = mapChars(domain_name, useSTD3, processing_option);
result.string = normalize(result.string);
var labels = result.string.split(".");
for (var i = 0; i < labels.length; ++i) {
try {
var validation = validateLabel(labels[i], processing_option);
labels[i] = validation.label;
result.error = result.error || validation.error;
} catch(e) {
result.error = true;
}
}
return {
string: labels.join("."),
error: result.error
};
}
module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {
var result = processing(domain_name, useSTD3, processing_option);
var labels = result.string.split(".");
labels = labels.map(function(l) {
try {
return punycode.toASCII(l);
} catch(e) {
result.error = true;
return l;
}
});
if (verifyDnsLength) {
var total = labels.slice(0, labels.length - 1).join(".").length;
if (total > 253 || total === 0) {
result.error = true;
}
for (var i = 0; i < labels.length; ++i) {
if (labels[i].length > 63 || labels[i].length === 0) {
result.error = true;
break;
}
}
}
if (result.error) return null;
return labels.join(".");
};
module.exports.toUnicode = function(domain_name, useSTD3) {
var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);
return {
domain: result.string,
error: result.error
};
};
module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;
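// Illustrative sketch (comment only): using the tr46 exports above, assuming the
// bundled mapping table resolves these code points the same way as the published IDNA
// data. The domain names are placeholders.
/*
const tr46 = module.exports;
tr46.toASCII('Bücher.example', false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
// -> 'xn--bcher-kva.example' (or null if any label fails validation)
tr46.toUnicode('xn--bcher-kva.example', false);
// -> { domain: 'bücher.example', error: false }
*/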
/***/ }),
/***/ 5871:
/***/ ((module) => {
"use strict";
var conversions = {};
module.exports = conversions;
function sign(x) {
return x < 0 ? -1 : 1;
}
function evenRound(x) {
// Round x to the nearest integer, choosing the even integer if it lies halfway between two.
if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)
return Math.floor(x);
} else {
return Math.round(x);
}
}
function createNumberConversion(bitLength, typeOpts) {
if (!typeOpts.unsigned) {
--bitLength;
}
const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);
const upperBound = Math.pow(2, bitLength) - 1;
const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);
const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);
return function(V, opts) {
if (!opts) opts = {};
let x = +V;
if (opts.enforceRange) {
if (!Number.isFinite(x)) {
throw new TypeError("Argument is not a finite number");
}
x = sign(x) * Math.floor(Math.abs(x));
if (x < lowerBound || x > upperBound) {
throw new TypeError("Argument is not in byte range");
}
return x;
}
if (!isNaN(x) && opts.clamp) {
x = evenRound(x);
if (x < lowerBound) x = lowerBound;
if (x > upperBound) x = upperBound;
return x;
}
if (!Number.isFinite(x) || x === 0) {
return 0;
}
x = sign(x) * Math.floor(Math.abs(x));
x = x % moduloVal;
if (!typeOpts.unsigned && x >= moduloBound) {
return x - moduloVal;
} else if (typeOpts.unsigned) {
if (x < 0) {
x += moduloVal;
} else if (x === -0) { // don't return negative zero
return 0;
}
}
return x;
}
}
conversions["void"] = function () {
return undefined;
};
conversions["boolean"] = function (val) {
return !!val;
};
conversions["byte"] = createNumberConversion(8, { unsigned: false });
conversions["octet"] = createNumberConversion(8, { unsigned: true });
conversions["short"] = createNumberConversion(16, { unsigned: false });
conversions["unsigned short"] = createNumberConversion(16, { unsigned: true });
conversions["long"] = createNumberConversion(32, { unsigned: false });
conversions["unsigned long"] = createNumberConversion(32, { unsigned: true });
conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });
conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });
conversions["double"] = function (V) {
const x = +V;
if (!Number.isFinite(x)) {
throw new TypeError("Argument is not a finite floating-point value");
}
return x;
};
conversions["unrestricted double"] = function (V) {
const x = +V;
if (isNaN(x)) {
throw new TypeError("Argument is NaN");
}
return x;
};
// not quite valid, but good enough for JS
conversions["float"] = conversions["double"];
conversions["unrestricted float"] = conversions["unrestricted double"];
conversions["DOMString"] = function (V, opts) {
if (!opts) opts = {};
if (opts.treatNullAsEmptyString && V === null) {
return "";
}
return String(V);
};
conversions["ByteString"] = function (V, opts) {
const x = String(V);
let c = undefined;
for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {
if (c > 255) {
throw new TypeError("Argument is not a valid bytestring");
}
}
return x;
};
conversions["USVString"] = function (V) {
const S = String(V);
const n = S.length;
const U = [];
for (let i = 0; i < n; ++i) {
const c = S.charCodeAt(i);
if (c < 0xD800 || c > 0xDFFF) {
U.push(String.fromCodePoint(c));
} else if (0xDC00 <= c && c <= 0xDFFF) {
U.push(String.fromCodePoint(0xFFFD));
} else {
if (i === n - 1) {
U.push(String.fromCodePoint(0xFFFD));
} else {
const d = S.charCodeAt(i + 1);
if (0xDC00 <= d && d <= 0xDFFF) {
const a = c & 0x3FF;
const b = d & 0x3FF;
U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));
++i;
} else {
U.push(String.fromCodePoint(0xFFFD));
}
}
}
}
return U.join('');
};
conversions["Date"] = function (V, opts) {
if (!(V instanceof Date)) {
throw new TypeError("Argument is not a Date object");
}
if (isNaN(V)) {
return undefined;
}
return V;
};
conversions["RegExp"] = function (V, opts) {
if (!(V instanceof RegExp)) {
V = new RegExp(V);
}
return V;
};
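// Illustrative sketch (comment only): how the unsigned conversions above wrap values,
// mirroring WebIDL's modulo behaviour.
/*
conversions['octet'](300);            // 44  (300 modulo 256)
conversions['octet'](-1);             // 255 (wraps around for unsigned types)
conversions['unsigned long'](-1);     // 4294967295
conversions['USVString']('a\uD800b'); // 'a\uFFFDb' (lone surrogate replaced with U+FFFD)
conversions['ByteString']('\u0100');  // throws TypeError (code point above 255)
*/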
/***/ }),
/***/ 8262:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
const usm = __nccwpck_require__(33);
exports.implementation = class URLImpl {
constructor(constructorArgs) {
const url = constructorArgs[0];
const base = constructorArgs[1];
let parsedBase = null;
if (base !== undefined) {
parsedBase = usm.basicURLParse(base);
if (parsedBase === "failure") {
throw new TypeError("Invalid base URL");
}
}
const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });
if (parsedURL === "failure") {
throw new TypeError("Invalid URL");
}
this._url = parsedURL;
// TODO: query stuff
}
get href() {
return usm.serializeURL(this._url);
}
set href(v) {
const parsedURL = usm.basicURLParse(v);
if (parsedURL === "failure") {
throw new TypeError("Invalid URL");
}
this._url = parsedURL;
}
get origin() {
return usm.serializeURLOrigin(this._url);
}
get protocol() {
return this._url.scheme + ":";
}
set protocol(v) {
usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" });
}
get username() {
return this._url.username;
}
set username(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
usm.setTheUsername(this._url, v);
}
get password() {
return this._url.password;
}
set password(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
usm.setThePassword(this._url, v);
}
get host() {
const url = this._url;
if (url.host === null) {
return "";
}
if (url.port === null) {
return usm.serializeHost(url.host);
}
return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port);
}
set host(v) {
if (this._url.cannotBeABaseURL) {
return;
}
usm.basicURLParse(v, { url: this._url, stateOverride: "host" });
}
get hostname() {
if (this._url.host === null) {
return "";
}
return usm.serializeHost(this._url.host);
}
set hostname(v) {
if (this._url.cannotBeABaseURL) {
return;
}
usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" });
}
get port() {
if (this._url.port === null) {
return "";
}
return usm.serializeInteger(this._url.port);
}
set port(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
if (v === "") {
this._url.port = null;
} else {
usm.basicURLParse(v, { url: this._url, stateOverride: "port" });
}
}
get pathname() {
if (this._url.cannotBeABaseURL) {
return this._url.path[0];
}
if (this._url.path.length === 0) {
return "";
}
return "/" + this._url.path.join("/");
}
set pathname(v) {
if (this._url.cannotBeABaseURL) {
return;
}
this._url.path = [];
usm.basicURLParse(v, { url: this._url, stateOverride: "path start" });
}
get search() {
if (this._url.query === null || this._url.query === "") {
return "";
}
return "?" + this._url.query;
}
set search(v) {
// TODO: query stuff
const url = this._url;
if (v === "") {
url.query = null;
return;
}
const input = v[0] === "?" ? v.substring(1) : v;
url.query = "";
usm.basicURLParse(input, { url, stateOverride: "query" });
}
get hash() {
if (this._url.fragment === null || this._url.fragment === "") {
return "";
}
return "#" + this._url.fragment;
}
set hash(v) {
if (v === "") {
this._url.fragment = null;
return;
}
const input = v[0] === "#" ? v.substring(1) : v;
this._url.fragment = "";
usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" });
}
toJSON() {
return this.href;
}
};
/***/ }),
/***/ 653:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
const conversions = __nccwpck_require__(5871);
const utils = __nccwpck_require__(276);
const Impl = __nccwpck_require__(8262);
const impl = utils.implSymbol;
function URL(url) {
if (!this || this[impl] || !(this instanceof URL)) {
throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.");
}
if (arguments.length < 1) {
throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present.");
}
const args = [];
for (let i = 0; i < arguments.length && i < 2; ++i) {
args[i] = arguments[i];
}
args[0] = conversions["USVString"](args[0]);
if (args[1] !== undefined) {
args[1] = conversions["USVString"](args[1]);
}
module.exports.setup(this, args);
}
URL.prototype.toJSON = function toJSON() {
if (!this || !module.exports.is(this)) {
throw new TypeError("Illegal invocation");
}
const args = [];
for (let i = 0; i < arguments.length && i < 0; ++i) { // generated for a zero-argument operation, so this loop never runs
args[i] = arguments[i];
}
return this[impl].toJSON.apply(this[impl], args);
};
Object.defineProperty(URL.prototype, "href", {
get() {
return this[impl].href;
},
set(V) {
V = conversions["USVString"](V);
this[impl].href = V;
},
enumerable: true,
configurable: true
});
URL.prototype.toString = function () {
if (!this || !module.exports.is(this)) {
throw new TypeError("Illegal invocation");
}
return this.href;
};
Object.defineProperty(URL.prototype, "origin", {
get() {
return this[impl].origin;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "protocol", {
get() {
return this[impl].protocol;
},
set(V) {
V = conversions["USVString"](V);
this[impl].protocol = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "username", {
get() {
return this[impl].username;
},
set(V) {
V = conversions["USVString"](V);
this[impl].username = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "password", {
get() {
return this[impl].password;
},
set(V) {
V = conversions["USVString"](V);
this[impl].password = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "host", {
get() {
return this[impl].host;
},
set(V) {
V = conversions["USVString"](V);
this[impl].host = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "hostname", {
get() {
return this[impl].hostname;
},
set(V) {
V = conversions["USVString"](V);
this[impl].hostname = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "port", {
get() {
return this[impl].port;
},
set(V) {
V = conversions["USVString"](V);
this[impl].port = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "pathname", {
get() {
return this[impl].pathname;
},
set(V) {
V = conversions["USVString"](V);
this[impl].pathname = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "search", {
get() {
return this[impl].search;
},
set(V) {
V = conversions["USVString"](V);
this[impl].search = V;
},
enumerable: true,
configurable: true
});
Object.defineProperty(URL.prototype, "hash", {
get() {
return this[impl].hash;
},
set(V) {
V = conversions["USVString"](V);
this[impl].hash = V;
},
enumerable: true,
configurable: true
});
module.exports = {
is(obj) {
return !!obj && obj[impl] instanceof Impl.implementation;
},
create(constructorArgs, privateData) {
let obj = Object.create(URL.prototype);
this.setup(obj, constructorArgs, privateData);
return obj;
},
setup(obj, constructorArgs, privateData) {
if (!privateData) privateData = {};
privateData.wrapper = obj;
obj[impl] = new Impl.implementation(constructorArgs, privateData);
obj[impl][utils.wrapperSymbol] = obj;
},
interface: URL,
expose: {
Window: { URL: URL },
Worker: { URL: URL }
}
};
/***/ }),
/***/ 3323:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
exports.URL = __nccwpck_require__(653)["interface"];
exports.serializeURL = __nccwpck_require__(33).serializeURL;
exports.serializeURLOrigin = __nccwpck_require__(33).serializeURLOrigin;
exports.basicURLParse = __nccwpck_require__(33).basicURLParse;
exports.setTheUsername = __nccwpck_require__(33).setTheUsername;
exports.setThePassword = __nccwpck_require__(33).setThePassword;
exports.serializeHost = __nccwpck_require__(33).serializeHost;
exports.serializeInteger = __nccwpck_require__(33).serializeInteger;
exports.parseURL = __nccwpck_require__(33).parseURL;
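// Illustrative sketch (comment only): using the URL interface re-exported above (the
// object other modules in this bundle receive from __nccwpck_require__(3323)). Values
// are placeholders; note that this copy of the interface defines no searchParams
// accessor (see the property definitions in module 653).
/*
const u = new exports.URL('/path?q=1', 'https://user:secret@example.com:8080');
u.href;      // 'https://user:secret@example.com:8080/path?q=1'
u.origin;    // 'https://example.com:8080'
u.pathname;  // '/path'
u.port;      // '8080'
*/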
/***/ }),
/***/ 33:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
const punycode = __nccwpck_require__(5477);
const tr46 = __nccwpck_require__(2299);
const specialSchemes = {
ftp: 21,
file: null,
gopher: 70,
http: 80,
https: 443,
ws: 80,
wss: 443
};
const failure = Symbol("failure");
function countSymbols(str) {
return punycode.ucs2.decode(str).length;
}
function at(input, idx) {
const c = input[idx];
return isNaN(c) ? undefined : String.fromCodePoint(c);
}
function isASCIIDigit(c) {
return c >= 0x30 && c <= 0x39;
}
function isASCIIAlpha(c) {
return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);
}
function isASCIIAlphanumeric(c) {
return isASCIIAlpha(c) || isASCIIDigit(c);
}
function isASCIIHex(c) {
return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);
}
function isSingleDot(buffer) {
return buffer === "." || buffer.toLowerCase() === "%2e";
}
function isDoubleDot(buffer) {
buffer = buffer.toLowerCase();
return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e";
}
function isWindowsDriveLetterCodePoints(cp1, cp2) {
return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);
}
function isWindowsDriveLetterString(string) {
return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|");
}
function isNormalizedWindowsDriveLetterString(string) {
return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":";
}
function containsForbiddenHostCodePoint(string) {
return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1;
}
function containsForbiddenHostCodePointExcludingPercent(string) {
return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1;
}
function isSpecialScheme(scheme) {
return specialSchemes[scheme] !== undefined;
}
function isSpecial(url) {
return isSpecialScheme(url.scheme);
}
function defaultPort(scheme) {
return specialSchemes[scheme];
}
function percentEncode(c) {
let hex = c.toString(16).toUpperCase();
if (hex.length === 1) {
hex = "0" + hex;
}
return "%" + hex;
}
function utf8PercentEncode(c) {
const buf = Buffer.from(c);
let str = "";
for (let i = 0; i < buf.length; ++i) {
str += percentEncode(buf[i]);
}
return str;
}
function utf8PercentDecode(str) {
const input = Buffer.from(str);
const output = [];
for (let i = 0; i < input.length; ++i) {
if (input[i] !== 37) {
output.push(input[i]);
} else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {
output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));
i += 2;
} else {
output.push(input[i]);
}
}
return Buffer.from(output).toString();
}
function isC0ControlPercentEncode(c) {
return c <= 0x1F || c > 0x7E;
}
const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);
function isPathPercentEncode(c) {
return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);
}
const extraUserinfoPercentEncodeSet =
new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);
function isUserinfoPercentEncode(c) {
return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);
}
function percentEncodeChar(c, encodeSetPredicate) {
const cStr = String.fromCodePoint(c);
if (encodeSetPredicate(c)) {
return utf8PercentEncode(cStr);
}
return cStr;
}
function parseIPv4Number(input) {
let R = 10;
if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") {
input = input.substring(2);
R = 16;
} else if (input.length >= 2 && input.charAt(0) === "0") {
input = input.substring(1);
R = 8;
}
if (input === "") {
return 0;
}
const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);
if (regex.test(input)) {
return failure;
}
return parseInt(input, R);
}
function parseIPv4(input) {
const parts = input.split(".");
if (parts[parts.length - 1] === "") {
if (parts.length > 1) {
parts.pop();
}
}
if (parts.length > 4) {
return input;
}
const numbers = [];
for (const part of parts) {
if (part === "") {
return input;
}
const n = parseIPv4Number(part);
if (n === failure) {
return input;
}
numbers.push(n);
}
for (let i = 0; i < numbers.length - 1; ++i) {
if (numbers[i] > 255) {
return failure;
}
}
if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {
return failure;
}
let ipv4 = numbers.pop();
let counter = 0;
for (const n of numbers) {
ipv4 += n * Math.pow(256, 3 - counter);
++counter;
}
return ipv4;
}
function serializeIPv4(address) {
let output = "";
let n = address;
for (let i = 1; i <= 4; ++i) {
output = String(n % 256) + output;
if (i !== 4) {
output = "." + output;
}
n = Math.floor(n / 256);
}
return output;
}
function parseIPv6(input) {
const address = [0, 0, 0, 0, 0, 0, 0, 0];
let pieceIndex = 0;
let compress = null;
let pointer = 0;
input = punycode.ucs2.decode(input);
if (input[pointer] === 58) {
if (input[pointer + 1] !== 58) {
return failure;
}
pointer += 2;
++pieceIndex;
compress = pieceIndex;
}
while (pointer < input.length) {
if (pieceIndex === 8) {
return failure;
}
if (input[pointer] === 58) {
if (compress !== null) {
return failure;
}
++pointer;
++pieceIndex;
compress = pieceIndex;
continue;
}
let value = 0;
let length = 0;
while (length < 4 && isASCIIHex(input[pointer])) {
value = value * 0x10 + parseInt(at(input, pointer), 16);
++pointer;
++length;
}
if (input[pointer] === 46) {
if (length === 0) {
return failure;
}
pointer -= length;
if (pieceIndex > 6) {
return failure;
}
let numbersSeen = 0;
while (input[pointer] !== undefined) {
let ipv4Piece = null;
if (numbersSeen > 0) {
if (input[pointer] === 46 && numbersSeen < 4) {
++pointer;
} else {
return failure;
}
}
if (!isASCIIDigit(input[pointer])) {
return failure;
}
while (isASCIIDigit(input[pointer])) {
const number = parseInt(at(input, pointer));
if (ipv4Piece === null) {
ipv4Piece = number;
} else if (ipv4Piece === 0) {
return failure;
} else {
ipv4Piece = ipv4Piece * 10 + number;
}
if (ipv4Piece > 255) {
return failure;
}
++pointer;
}
address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;
++numbersSeen;
if (numbersSeen === 2 || numbersSeen === 4) {
++pieceIndex;
}
}
if (numbersSeen !== 4) {
return failure;
}
break;
} else if (input[pointer] === 58) {
++pointer;
if (input[pointer] === undefined) {
return failure;
}
} else if (input[pointer] !== undefined) {
return failure;
}
address[pieceIndex] = value;
++pieceIndex;
}
if (compress !== null) {
let swaps = pieceIndex - compress;
pieceIndex = 7;
while (pieceIndex !== 0 && swaps > 0) {
const temp = address[compress + swaps - 1];
address[compress + swaps - 1] = address[pieceIndex];
address[pieceIndex] = temp;
--pieceIndex;
--swaps;
}
} else if (compress === null && pieceIndex !== 8) {
return failure;
}
return address;
}
function serializeIPv6(address) {
let output = "";
const seqResult = findLongestZeroSequence(address);
const compress = seqResult.idx;
let ignore0 = false;
for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {
if (ignore0 && address[pieceIndex] === 0) {
continue;
} else if (ignore0) {
ignore0 = false;
}
if (compress === pieceIndex) {
const separator = pieceIndex === 0 ? "::" : ":";
output += separator;
ignore0 = true;
continue;
}
output += address[pieceIndex].toString(16);
if (pieceIndex !== 7) {
output += ":";
}
}
return output;
}
function parseHost(input, isSpecialArg) {
if (input[0] === "[") {
if (input[input.length - 1] !== "]") {
return failure;
}
return parseIPv6(input.substring(1, input.length - 1));
}
if (!isSpecialArg) {
return parseOpaqueHost(input);
}
const domain = utf8PercentDecode(input);
const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
if (asciiDomain === null) {
return failure;
}
if (containsForbiddenHostCodePoint(asciiDomain)) {
return failure;
}
const ipv4Host = parseIPv4(asciiDomain);
if (typeof ipv4Host === "number" || ipv4Host === failure) {
return ipv4Host;
}
return asciiDomain;
}
function parseOpaqueHost(input) {
if (containsForbiddenHostCodePointExcludingPercent(input)) {
return failure;
}
let output = "";
const decoded = punycode.ucs2.decode(input);
for (let i = 0; i < decoded.length; ++i) {
output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);
}
return output;
}
function findLongestZeroSequence(arr) {
let maxIdx = null;
let maxLen = 1; // only find elements > 1
let currStart = null;
let currLen = 0;
for (let i = 0; i < arr.length; ++i) {
if (arr[i] !== 0) {
if (currLen > maxLen) {
maxIdx = currStart;
maxLen = currLen;
}
currStart = null;
currLen = 0;
} else {
if (currStart === null) {
currStart = i;
}
++currLen;
}
}
// if trailing zeros
if (currLen > maxLen) {
maxIdx = currStart;
maxLen = currLen;
}
return {
idx: maxIdx,
len: maxLen
};
}
function serializeHost(host) {
if (typeof host === "number") {
return serializeIPv4(host);
}
// IPv6 serializer
if (host instanceof Array) {
return "[" + serializeIPv6(host) + "]";
}
return host;
}
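// Illustrative sketch (comment only): the three host representations handled above.
/*
serializeHost('example.com');             // 'example.com' (ASCII domain passes through)
serializeHost(2130706433);                // '127.0.0.1'   (IPv4 stored as a number)
serializeHost([0, 0, 0, 0, 0, 0, 0, 1]);  // '[::1]'       (IPv6 stored as 8 pieces)
*/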
function trimControlChars(url) {
return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, "");
}
function trimTabAndNewline(url) {
return url.replace(/\u0009|\u000A|\u000D/g, "");
}
function shortenPath(url) {
const path = url.path;
if (path.length === 0) {
return;
}
if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {
return;
}
path.pop();
}
function includesCredentials(url) {
return url.username !== "" || url.password !== "";
}
function cannotHaveAUsernamePasswordPort(url) {
return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file";
}
function isNormalizedWindowsDriveLetter(string) {
return /^[A-Za-z]:$/.test(string);
}
function URLStateMachine(input, base, encodingOverride, url, stateOverride) {
this.pointer = 0;
this.input = input;
this.base = base || null;
this.encodingOverride = encodingOverride || "utf-8";
this.stateOverride = stateOverride;
this.url = url;
this.failure = false;
this.parseError = false;
if (!this.url) {
this.url = {
scheme: "",
username: "",
password: "",
host: null,
port: null,
path: [],
query: null,
fragment: null,
cannotBeABaseURL: false
};
const res = trimControlChars(this.input);
if (res !== this.input) {
this.parseError = true;
}
this.input = res;
}
const res = trimTabAndNewline(this.input);
if (res !== this.input) {
this.parseError = true;
}
this.input = res;
this.state = stateOverride || "scheme start";
this.buffer = "";
this.atFlag = false;
this.arrFlag = false;
this.passwordTokenSeenFlag = false;
this.input = punycode.ucs2.decode(this.input);
for (; this.pointer <= this.input.length; ++this.pointer) {
const c = this.input[this.pointer];
const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);
// exec state machine
const ret = this["parse " + this.state](c, cStr);
if (!ret) {
break; // terminate algorithm
} else if (ret === failure) {
this.failure = true;
break;
}
}
}
URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) {
if (isASCIIAlpha(c)) {
this.buffer += cStr.toLowerCase();
this.state = "scheme";
} else if (!this.stateOverride) {
this.state = "no scheme";
--this.pointer;
} else {
this.parseError = true;
return failure;
}
return true;
};
URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) {
if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {
this.buffer += cStr.toLowerCase();
} else if (c === 58) {
if (this.stateOverride) {
if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {
return false;
}
if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {
return false;
}
if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") {
return false;
}
if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) {
return false;
}
}
this.url.scheme = this.buffer;
this.buffer = "";
if (this.stateOverride) {
return false;
}
if (this.url.scheme === "file") {
if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {
this.parseError = true;
}
this.state = "file";
} else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {
this.state = "special relative or authority";
} else if (isSpecial(this.url)) {
this.state = "special authority slashes";
} else if (this.input[this.pointer + 1] === 47) {
this.state = "path or authority";
++this.pointer;
} else {
this.url.cannotBeABaseURL = true;
this.url.path.push("");
this.state = "cannot-be-a-base-URL path";
}
} else if (!this.stateOverride) {
this.buffer = "";
this.state = "no scheme";
this.pointer = -1;
} else {
this.parseError = true;
return failure;
}
return true;
};
URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) {
if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {
return failure;
} else if (this.base.cannotBeABaseURL && c === 35) {
this.url.scheme = this.base.scheme;
this.url.path = this.base.path.slice();
this.url.query = this.base.query;
this.url.fragment = "";
this.url.cannotBeABaseURL = true;
this.state = "fragment";
} else if (this.base.scheme === "file") {
this.state = "file";
--this.pointer;
} else {
this.state = "relative";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) {
if (c === 47 && this.input[this.pointer + 1] === 47) {
this.state = "special authority ignore slashes";
++this.pointer;
} else {
this.parseError = true;
this.state = "relative";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) {
if (c === 47) {
this.state = "authority";
} else {
this.state = "path";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse relative"] = function parseRelative(c) {
this.url.scheme = this.base.scheme;
if (isNaN(c)) {
this.url.username = this.base.username;
this.url.password = this.base.password;
this.url.host = this.base.host;
this.url.port = this.base.port;
this.url.path = this.base.path.slice();
this.url.query = this.base.query;
} else if (c === 47) {
this.state = "relative slash";
} else if (c === 63) {
this.url.username = this.base.username;
this.url.password = this.base.password;
this.url.host = this.base.host;
this.url.port = this.base.port;
this.url.path = this.base.path.slice();
this.url.query = "";
this.state = "query";
} else if (c === 35) {
this.url.username = this.base.username;
this.url.password = this.base.password;
this.url.host = this.base.host;
this.url.port = this.base.port;
this.url.path = this.base.path.slice();
this.url.query = this.base.query;
this.url.fragment = "";
this.state = "fragment";
} else if (isSpecial(this.url) && c === 92) {
this.parseError = true;
this.state = "relative slash";
} else {
this.url.username = this.base.username;
this.url.password = this.base.password;
this.url.host = this.base.host;
this.url.port = this.base.port;
this.url.path = this.base.path.slice(0, this.base.path.length - 1);
this.state = "path";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) {
if (isSpecial(this.url) && (c === 47 || c === 92)) {
if (c === 92) {
this.parseError = true;
}
this.state = "special authority ignore slashes";
} else if (c === 47) {
this.state = "authority";
} else {
this.url.username = this.base.username;
this.url.password = this.base.password;
this.url.host = this.base.host;
this.url.port = this.base.port;
this.state = "path";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) {
if (c === 47 && this.input[this.pointer + 1] === 47) {
this.state = "special authority ignore slashes";
++this.pointer;
} else {
this.parseError = true;
this.state = "special authority ignore slashes";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) {
if (c !== 47 && c !== 92) {
this.state = "authority";
--this.pointer;
} else {
this.parseError = true;
}
return true;
};
URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) {
if (c === 64) {
this.parseError = true;
if (this.atFlag) {
this.buffer = "%40" + this.buffer;
}
this.atFlag = true;
// careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars
const len = countSymbols(this.buffer);
for (let pointer = 0; pointer < len; ++pointer) {
const codePoint = this.buffer.codePointAt(pointer);
if (codePoint === 58 && !this.passwordTokenSeenFlag) {
this.passwordTokenSeenFlag = true;
continue;
}
const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);
if (this.passwordTokenSeenFlag) {
this.url.password += encodedCodePoints;
} else {
this.url.username += encodedCodePoints;
}
}
this.buffer = "";
} else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
(isSpecial(this.url) && c === 92)) {
if (this.atFlag && this.buffer === "") {
this.parseError = true;
return failure;
}
this.pointer -= countSymbols(this.buffer) + 1;
this.buffer = "";
this.state = "host";
} else {
this.buffer += cStr;
}
return true;
};
URLStateMachine.prototype["parse hostname"] =
URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) {
if (this.stateOverride && this.url.scheme === "file") {
--this.pointer;
this.state = "file host";
} else if (c === 58 && !this.arrFlag) {
if (this.buffer === "") {
this.parseError = true;
return failure;
}
const host = parseHost(this.buffer, isSpecial(this.url));
if (host === failure) {
return failure;
}
this.url.host = host;
this.buffer = "";
this.state = "port";
if (this.stateOverride === "hostname") {
return false;
}
} else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
(isSpecial(this.url) && c === 92)) {
--this.pointer;
if (isSpecial(this.url) && this.buffer === "") {
this.parseError = true;
return failure;
} else if (this.stateOverride && this.buffer === "" &&
(includesCredentials(this.url) || this.url.port !== null)) {
this.parseError = true;
return false;
}
const host = parseHost(this.buffer, isSpecial(this.url));
if (host === failure) {
return failure;
}
this.url.host = host;
this.buffer = "";
this.state = "path start";
if (this.stateOverride) {
return false;
}
} else {
if (c === 91) {
this.arrFlag = true;
} else if (c === 93) {
this.arrFlag = false;
}
this.buffer += cStr;
}
return true;
};
URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) {
if (isASCIIDigit(c)) {
this.buffer += cStr;
} else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
(isSpecial(this.url) && c === 92) ||
this.stateOverride) {
if (this.buffer !== "") {
const port = parseInt(this.buffer);
if (port > Math.pow(2, 16) - 1) {
this.parseError = true;
return failure;
}
this.url.port = port === defaultPort(this.url.scheme) ? null : port;
this.buffer = "";
}
if (this.stateOverride) {
return false;
}
this.state = "path start";
--this.pointer;
} else {
this.parseError = true;
return failure;
}
return true;
};
const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);
URLStateMachine.prototype["parse file"] = function parseFile(c) {
this.url.scheme = "file";
if (c === 47 || c === 92) {
if (c === 92) {
this.parseError = true;
}
this.state = "file slash";
} else if (this.base !== null && this.base.scheme === "file") {
if (isNaN(c)) {
this.url.host = this.base.host;
this.url.path = this.base.path.slice();
this.url.query = this.base.query;
} else if (c === 63) {
this.url.host = this.base.host;
this.url.path = this.base.path.slice();
this.url.query = "";
this.state = "query";
} else if (c === 35) {
this.url.host = this.base.host;
this.url.path = this.base.path.slice();
this.url.query = this.base.query;
this.url.fragment = "";
this.state = "fragment";
} else {
if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points
!isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||
(this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points
!fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {
this.url.host = this.base.host;
this.url.path = this.base.path.slice();
shortenPath(this.url);
} else {
this.parseError = true;
}
this.state = "path";
--this.pointer;
}
} else {
this.state = "path";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) {
if (c === 47 || c === 92) {
if (c === 92) {
this.parseError = true;
}
this.state = "file host";
} else {
if (this.base !== null && this.base.scheme === "file") {
if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {
this.url.path.push(this.base.path[0]);
} else {
this.url.host = this.base.host;
}
}
this.state = "path";
--this.pointer;
}
return true;
};
URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) {
if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {
--this.pointer;
if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {
this.parseError = true;
this.state = "path";
} else if (this.buffer === "") {
this.url.host = "";
if (this.stateOverride) {
return false;
}
this.state = "path start";
} else {
let host = parseHost(this.buffer, isSpecial(this.url));
if (host === failure) {
return failure;
}
if (host === "localhost") {
host = "";
}
this.url.host = host;
if (this.stateOverride) {
return false;
}
this.buffer = "";
this.state = "path start";
}
} else {
this.buffer += cStr;
}
return true;
};
URLStateMachine.prototype["parse path start"] = function parsePathStart(c) {
if (isSpecial(this.url)) {
if (c === 92) {
this.parseError = true;
}
this.state = "path";
if (c !== 47 && c !== 92) {
--this.pointer;
}
} else if (!this.stateOverride && c === 63) {
this.url.query = "";
this.state = "query";
} else if (!this.stateOverride && c === 35) {
this.url.fragment = "";
this.state = "fragment";
} else if (c !== undefined) {
this.state = "path";
if (c !== 47) {
--this.pointer;
}
}
return true;
};
URLStateMachine.prototype["parse path"] = function parsePath(c) {
if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||
(!this.stateOverride && (c === 63 || c === 35))) {
if (isSpecial(this.url) && c === 92) {
this.parseError = true;
}
if (isDoubleDot(this.buffer)) {
shortenPath(this.url);
if (c !== 47 && !(isSpecial(this.url) && c === 92)) {
this.url.path.push("");
}
} else if (isSingleDot(this.buffer) && c !== 47 &&
!(isSpecial(this.url) && c === 92)) {
this.url.path.push("");
} else if (!isSingleDot(this.buffer)) {
if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {
if (this.url.host !== "" && this.url.host !== null) {
this.parseError = true;
this.url.host = "";
}
this.buffer = this.buffer[0] + ":";
}
this.url.path.push(this.buffer);
}
this.buffer = "";
if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) {
while (this.url.path.length > 1 && this.url.path[0] === "") {
this.parseError = true;
this.url.path.shift();
}
}
if (c === 63) {
this.url.query = "";
this.state = "query";
}
if (c === 35) {
this.url.fragment = "";
this.state = "fragment";
}
} else {
// TODO: If c is not a URL code point and not "%", parse error.
if (c === 37 &&
(!isASCIIHex(this.input[this.pointer + 1]) ||
!isASCIIHex(this.input[this.pointer + 2]))) {
this.parseError = true;
}
this.buffer += percentEncodeChar(c, isPathPercentEncode);
}
return true;
};
URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) {
if (c === 63) {
this.url.query = "";
this.state = "query";
} else if (c === 35) {
this.url.fragment = "";
this.state = "fragment";
} else {
// TODO: Add: not a URL code point
if (!isNaN(c) && c !== 37) {
this.parseError = true;
}
if (c === 37 &&
(!isASCIIHex(this.input[this.pointer + 1]) ||
!isASCIIHex(this.input[this.pointer + 2]))) {
this.parseError = true;
}
if (!isNaN(c)) {
this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);
}
}
return true;
};
URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) {
if (isNaN(c) || (!this.stateOverride && c === 35)) {
if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") {
this.encodingOverride = "utf-8";
}
const buffer = Buffer.from(this.buffer); // TODO: Use encoding override instead
for (let i = 0; i < buffer.length; ++i) {
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||
buffer[i] === 0x3C || buffer[i] === 0x3E) {
this.url.query += percentEncode(buffer[i]);
} else {
this.url.query += String.fromCodePoint(buffer[i]);
}
}
this.buffer = "";
if (c === 35) {
this.url.fragment = "";
this.state = "fragment";
}
} else {
// TODO: If c is not a URL code point and not "%", parse error.
if (c === 37 &&
(!isASCIIHex(this.input[this.pointer + 1]) ||
!isASCIIHex(this.input[this.pointer + 2]))) {
this.parseError = true;
}
this.buffer += cStr;
}
return true;
};
URLStateMachine.prototype["parse fragment"] = function parseFragment(c) {
if (isNaN(c)) { // do nothing
} else if (c === 0x0) {
this.parseError = true;
} else {
// TODO: If c is not a URL code point and not "%", parse error.
if (c === 37 &&
(!isASCIIHex(this.input[this.pointer + 1]) ||
!isASCIIHex(this.input[this.pointer + 2]))) {
this.parseError = true;
}
this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);
}
return true;
};
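// serializeURL turns a parsed URL record back into a string: scheme, then
// "//" plus userinfo/host/port when a host is present, then the path (or the
// opaque path[0] for cannot-be-a-base URLs), the query, and the fragment.
// The excludeFragment flag suppresses the trailing "#fragment" part.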
function serializeURL(url, excludeFragment) {
let output = url.scheme + ":";
if (url.host !== null) {
output += "//";
if (url.username !== "" || url.password !== "") {
output += url.username;
if (url.password !== "") {
output += ":" + url.password;
}
output += "@";
}
output += serializeHost(url.host);
if (url.port !== null) {
output += ":" + url.port;
}
} else if (url.host === null && url.scheme === "file") {
output += "//";
}
if (url.cannotBeABaseURL) {
output += url.path[0];
} else {
for (const string of url.path) {
output += "/" + string;
}
}
if (url.query !== null) {
output += "?" + url.query;
}
if (!excludeFragment && url.fragment !== null) {
output += "#" + url.fragment;
}
return output;
}
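// serializeOrigin renders an origin tuple ({ scheme, host, port }) as
// scheme://host[:port]; serializeURLOrigin below uses it for the tuple-origin
// schemes (ftp, gopher, http, https, ws, wss).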
function serializeOrigin(tuple) {
let result = tuple.scheme + "://";
result += serializeHost(tuple.host);
if (tuple.port !== null) {
result += ":" + tuple.port;
}
return result;
}
module.exports.serializeURL = serializeURL;
module.exports.serializeURLOrigin = function (url) {
// https://url.spec.whatwg.org/#concept-url-origin
switch (url.scheme) {
case "blob":
try {
return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));
} catch (e) {
// serializing an opaque origin returns "null"
return "null";
}
case "ftp":
case "gopher":
case "http":
case "https":
case "ws":
case "wss":
return serializeOrigin({
scheme: url.scheme,
host: url.host,
port: url.port
});
case "file":
// spec says "exercise to the reader", chrome says "file://"
return "file://";
default:
// serializing an opaque origin returns "null"
return "null";
}
};
module.exports.basicURLParse = function (input, options) {
if (options === undefined) {
options = {};
}
const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);
if (usm.failure) {
return "failure";
}
return usm.url;
};
module.exports.setTheUsername = function (url, username) {
url.username = "";
const decoded = punycode.ucs2.decode(username);
for (let i = 0; i < decoded.length; ++i) {
url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
}
};
module.exports.setThePassword = function (url, password) {
url.password = "";
const decoded = punycode.ucs2.decode(password);
for (let i = 0; i < decoded.length; ++i) {
url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
}
};
module.exports.serializeHost = serializeHost;
module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;
module.exports.serializeInteger = function (integer) {
return String(integer);
};
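// Illustrative sketch (not executed by the bundle): parseURL returns a URL
// record rather than a string, e.g. parseURL("https://example.com/a/b?q=1#f")
// would yield roughly { scheme: "https", host: "example.com", port: null,
// path: ["a", "b"], query: "q=1", fragment: "f", cannotBeABaseURL: false };
// exact field values are produced by the state machine above.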
module.exports.parseURL = function (input, options) {
if (options === undefined) {
options = {};
}
// We don't handle blobs, so this just delegates:
return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });
};
/***/ }),
/***/ 276:
/***/ ((module) => {
"use strict";
module.exports.mixin = function mixin(target, source) {
const keys = Object.getOwnPropertyNames(source);
for (let i = 0; i < keys.length; ++i) {
Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));
}
};
module.exports.wrapperSymbol = Symbol("wrapper");
module.exports.implSymbol = Symbol("impl");
module.exports.wrapperForImpl = function (impl) {
return impl[module.exports.wrapperSymbol];
};
module.exports.implForWrapper = function (wrapper) {
return wrapper[module.exports.implSymbol];
};
/***/ }),
/***/ 504:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var hasMap = typeof Map === 'function' && Map.prototype;
var mapSizeDescriptor = Object.getOwnPropertyDescriptor && hasMap ? Object.getOwnPropertyDescriptor(Map.prototype, 'size') : null;
var mapSize = hasMap && mapSizeDescriptor && typeof mapSizeDescriptor.get === 'function' ? mapSizeDescriptor.get : null;
var mapForEach = hasMap && Map.prototype.forEach;
var hasSet = typeof Set === 'function' && Set.prototype;
var setSizeDescriptor = Object.getOwnPropertyDescriptor && hasSet ? Object.getOwnPropertyDescriptor(Set.prototype, 'size') : null;
var setSize = hasSet && setSizeDescriptor && typeof setSizeDescriptor.get === 'function' ? setSizeDescriptor.get : null;
var setForEach = hasSet && Set.prototype.forEach;
var hasWeakMap = typeof WeakMap === 'function' && WeakMap.prototype;
var weakMapHas = hasWeakMap ? WeakMap.prototype.has : null;
var hasWeakSet = typeof WeakSet === 'function' && WeakSet.prototype;
var weakSetHas = hasWeakSet ? WeakSet.prototype.has : null;
var hasWeakRef = typeof WeakRef === 'function' && WeakRef.prototype;
var weakRefDeref = hasWeakRef ? WeakRef.prototype.deref : null;
var booleanValueOf = Boolean.prototype.valueOf;
var objectToString = Object.prototype.toString;
var functionToString = Function.prototype.toString;
var match = String.prototype.match;
var bigIntValueOf = typeof BigInt === 'function' ? BigInt.prototype.valueOf : null;
var gOPS = Object.getOwnPropertySymbols;
var symToString = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ? Symbol.prototype.toString : null;
var hasShammedSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'object';
var isEnumerable = Object.prototype.propertyIsEnumerable;
var gPO = (typeof Reflect === 'function' ? Reflect.getPrototypeOf : Object.getPrototypeOf) || (
[].__proto__ === Array.prototype // eslint-disable-line no-proto
? function (O) {
return O.__proto__; // eslint-disable-line no-proto
}
: null
);
var inspectCustom = (__nccwpck_require__(7265).custom);
var inspectSymbol = inspectCustom && isSymbol(inspectCustom) ? inspectCustom : null;
var toStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag !== 'undefined' ? Symbol.toStringTag : null;
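// inspect_ below is a structural pretty-printer: it renders primitives,
// functions, symbols, DOM-like elements, arrays, errors, Map/Set/Weak
// collections, boxed primitives, and plain objects, and it threads a `seen`
// array through recursive calls so circular references print as '[Circular]'.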
module.exports = function inspect_(obj, options, depth, seen) {
var opts = options || {};
if (has(opts, 'quoteStyle') && (opts.quoteStyle !== 'single' && opts.quoteStyle !== 'double')) {
throw new TypeError('option "quoteStyle" must be "single" or "double"');
}
if (
has(opts, 'maxStringLength') && (typeof opts.maxStringLength === 'number'
? opts.maxStringLength < 0 && opts.maxStringLength !== Infinity
: opts.maxStringLength !== null
)
) {
throw new TypeError('option "maxStringLength", if provided, must be a positive integer, Infinity, or `null`');
}
var customInspect = has(opts, 'customInspect') ? opts.customInspect : true;
if (typeof customInspect !== 'boolean' && customInspect !== 'symbol') {
throw new TypeError('option "customInspect", if provided, must be `true`, `false`, or `\'symbol\'`');
}
if (
has(opts, 'indent')
&& opts.indent !== null
&& opts.indent !== '\t'
&& !(parseInt(opts.indent, 10) === opts.indent && opts.indent > 0)
) {
throw new TypeError('options "indent" must be "\\t", an integer > 0, or `null`');
}
if (typeof obj === 'undefined') {
return 'undefined';
}
if (obj === null) {
return 'null';
}
if (typeof obj === 'boolean') {
return obj ? 'true' : 'false';
}
if (typeof obj === 'string') {
return inspectString(obj, opts);
}
if (typeof obj === 'number') {
if (obj === 0) {
return Infinity / obj > 0 ? '0' : '-0';
}
return String(obj);
}
if (typeof obj === 'bigint') {
return String(obj) + 'n';
}
var maxDepth = typeof opts.depth === 'undefined' ? 5 : opts.depth;
if (typeof depth === 'undefined') { depth = 0; }
if (depth >= maxDepth && maxDepth > 0 && typeof obj === 'object') {
return isArray(obj) ? '[Array]' : '[Object]';
}
var indent = getIndent(opts, depth);
if (typeof seen === 'undefined') {
seen = [];
} else if (indexOf(seen, obj) >= 0) {
return '[Circular]';
}
function inspect(value, from, noIndent) {
if (from) {
seen = seen.slice();
seen.push(from);
}
if (noIndent) {
var newOpts = {
depth: opts.depth
};
if (has(opts, 'quoteStyle')) {
newOpts.quoteStyle = opts.quoteStyle;
}
return inspect_(value, newOpts, depth + 1, seen);
}
return inspect_(value, opts, depth + 1, seen);
}
if (typeof obj === 'function') {
var name = nameOf(obj);
var keys = arrObjKeys(obj, inspect);
return '[Function' + (name ? ': ' + name : ' (anonymous)') + ']' + (keys.length > 0 ? ' { ' + keys.join(', ') + ' }' : '');
}
if (isSymbol(obj)) {
var symString = hasShammedSymbols ? String(obj).replace(/^(Symbol\(.*\))_[^)]*$/, '$1') : symToString.call(obj);
return typeof obj === 'object' && !hasShammedSymbols ? markBoxed(symString) : symString;
}
if (isElement(obj)) {
var s = '<' + String(obj.nodeName).toLowerCase();
var attrs = obj.attributes || [];
for (var i = 0; i < attrs.length; i++) {
s += ' ' + attrs[i].name + '=' + wrapQuotes(quote(attrs[i].value), 'double', opts);
}
s += '>';
if (obj.childNodes && obj.childNodes.length) { s += '...'; }
s += '</' + String(obj.nodeName).toLowerCase() + '>';
return s;
}
if (isArray(obj)) {
if (obj.length === 0) { return '[]'; }
var xs = arrObjKeys(obj, inspect);
if (indent && !singleLineValues(xs)) {
return '[' + indentedJoin(xs, indent) + ']';
}
return '[ ' + xs.join(', ') + ' ]';
}
if (isError(obj)) {
var parts = arrObjKeys(obj, inspect);
if (parts.length === 0) { return '[' + String(obj) + ']'; }
return '{ [' + String(obj) + '] ' + parts.join(', ') + ' }';
}
if (typeof obj === 'object' && customInspect) {
if (inspectSymbol && typeof obj[inspectSymbol] === 'function') {
return obj[inspectSymbol]();
} else if (customInspect !== 'symbol' && typeof obj.inspect === 'function') {
return obj.inspect();
}
}
if (isMap(obj)) {
var mapParts = [];
mapForEach.call(obj, function (value, key) {
mapParts.push(inspect(key, obj, true) + ' => ' + inspect(value, obj));
});
return collectionOf('Map', mapSize.call(obj), mapParts, indent);
}
if (isSet(obj)) {
var setParts = [];
setForEach.call(obj, function (value) {
setParts.push(inspect(value, obj));
});
return collectionOf('Set', setSize.call(obj), setParts, indent);
}
if (isWeakMap(obj)) {
return weakCollectionOf('WeakMap');
}
if (isWeakSet(obj)) {
return weakCollectionOf('WeakSet');
}
if (isWeakRef(obj)) {
return weakCollectionOf('WeakRef');
}
if (isNumber(obj)) {
return markBoxed(inspect(Number(obj)));
}
if (isBigInt(obj)) {
return markBoxed(inspect(bigIntValueOf.call(obj)));
}
if (isBoolean(obj)) {
return markBoxed(booleanValueOf.call(obj));
}
if (isString(obj)) {
return markBoxed(inspect(String(obj)));
}
if (!isDate(obj) && !isRegExp(obj)) {
var ys = arrObjKeys(obj, inspect);
var isPlainObject = gPO ? gPO(obj) === Object.prototype : obj instanceof Object || obj.constructor === Object;
var protoTag = obj instanceof Object ? '' : 'null prototype';
var stringTag = !isPlainObject && toStringTag && Object(obj) === obj && toStringTag in obj ? toStr(obj).slice(8, -1) : protoTag ? 'Object' : '';
var constructorTag = isPlainObject || typeof obj.constructor !== 'function' ? '' : obj.constructor.name ? obj.constructor.name + ' ' : '';
var tag = constructorTag + (stringTag || protoTag ? '[' + [].concat(stringTag || [], protoTag || []).join(': ') + '] ' : '');
if (ys.length === 0) { return tag + '{}'; }
if (indent) {
return tag + '{' + indentedJoin(ys, indent) + '}';
}
return tag + '{ ' + ys.join(', ') + ' }';
}
return String(obj);
};
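// Hedged usage sketch: inspect_({ a: [1, 2], b: 'x' }) would produce a string
// like "{ a: [ 1, 2 ], b: 'x' }"; once opts.depth is exceeded, nested values
// collapse to '[Object]' or '[Array]' as handled above.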
function wrapQuotes(s, defaultStyle, opts) {
var quoteChar = (opts.quoteStyle || defaultStyle) === 'double' ? '"' : "'";
return quoteChar + s + quoteChar;
}
function quote(s) {
return String(s).replace(/"/g, '&quot;');
}
function isArray(obj) { return toStr(obj) === '[object Array]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
function isDate(obj) { return toStr(obj) === '[object Date]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
function isRegExp(obj) { return toStr(obj) === '[object RegExp]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
function isError(obj) { return toStr(obj) === '[object Error]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
function isString(obj) { return toStr(obj) === '[object String]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
function isNumber(obj) { return toStr(obj) === '[object Number]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
function isBoolean(obj) { return toStr(obj) === '[object Boolean]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); }
// Symbol and BigInt do have Symbol.toStringTag by spec, so that can't be used to eliminate false positives
function isSymbol(obj) {
if (hasShammedSymbols) {
return obj && typeof obj === 'object' && obj instanceof Symbol;
}
if (typeof obj === 'symbol') {
return true;
}
if (!obj || typeof obj !== 'object' || !symToString) {
return false;
}
try {
symToString.call(obj);
return true;
} catch (e) {}
return false;
}
function isBigInt(obj) {
if (!obj || typeof obj !== 'object' || !bigIntValueOf) {
return false;
}
try {
bigIntValueOf.call(obj);
return true;
} catch (e) {}
return false;
}
var hasOwn = Object.prototype.hasOwnProperty || function (key) { return key in this; };
function has(obj, key) {
return hasOwn.call(obj, key);
}
function toStr(obj) {
return objectToString.call(obj);
}
function nameOf(f) {
if (f.name) { return f.name; }
var m = match.call(functionToString.call(f), /^function\s*([\w$]+)/);
if (m) { return m[1]; }
return null;
}
function indexOf(xs, x) {
if (xs.indexOf) { return xs.indexOf(x); }
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) { return i; }
}
return -1;
}
function isMap(x) {
if (!mapSize || !x || typeof x !== 'object') {
return false;
}
try {
mapSize.call(x);
try {
setSize.call(x);
} catch (s) {
return true;
}
return x instanceof Map; // core-js workaround, pre-v2.5.0
} catch (e) {}
return false;
}
function isWeakMap(x) {
if (!weakMapHas || !x || typeof x !== 'object') {
return false;
}
try {
weakMapHas.call(x, weakMapHas);
try {
weakSetHas.call(x, weakSetHas);
} catch (s) {
return true;
}
return x instanceof WeakMap; // core-js workaround, pre-v2.5.0
} catch (e) {}
return false;
}
function isWeakRef(x) {
if (!weakRefDeref || !x || typeof x !== 'object') {
return false;
}
try {
weakRefDeref.call(x);
return true;
} catch (e) {}
return false;
}
function isSet(x) {
if (!setSize || !x || typeof x !== 'object') {
return false;
}
try {
setSize.call(x);
try {
mapSize.call(x);
} catch (m) {
return true;
}
return x instanceof Set; // core-js workaround, pre-v2.5.0
} catch (e) {}
return false;
}
function isWeakSet(x) {
if (!weakSetHas || !x || typeof x !== 'object') {
return false;
}
try {
weakSetHas.call(x, weakSetHas);
try {
weakMapHas.call(x, weakMapHas);
} catch (s) {
return true;
}
return x instanceof WeakSet; // core-js workaround, pre-v2.5.0
} catch (e) {}
return false;
}
function isElement(x) {
if (!x || typeof x !== 'object') { return false; }
if (typeof HTMLElement !== 'undefined' && x instanceof HTMLElement) {
return true;
}
return typeof x.nodeName === 'string' && typeof x.getAttribute === 'function';
}
function inspectString(str, opts) {
if (str.length > opts.maxStringLength) {
var remaining = str.length - opts.maxStringLength;
var trailer = '... ' + remaining + ' more character' + (remaining > 1 ? 's' : '');
return inspectString(str.slice(0, opts.maxStringLength), opts) + trailer;
}
// eslint-disable-next-line no-control-regex
var s = str.replace(/(['\\])/g, '\\$1').replace(/[\x00-\x1f]/g, lowbyte);
return wrapQuotes(s, 'single', opts);
}
function lowbyte(c) {
var n = c.charCodeAt(0);
var x = {
8: 'b',
9: 't',
10: 'n',
12: 'f',
13: 'r'
}[n];
if (x) { return '\\' + x; }
return '\\x' + (n < 0x10 ? '0' : '') + n.toString(16).toUpperCase();
}
function markBoxed(str) {
return 'Object(' + str + ')';
}
function weakCollectionOf(type) {
return type + ' { ? }';
}
function collectionOf(type, size, entries, indent) {
var joinedEntries = indent ? indentedJoin(entries, indent) : entries.join(', ');
return type + ' (' + size + ') {' + joinedEntries + '}';
}
function singleLineValues(xs) {
for (var i = 0; i < xs.length; i++) {
if (indexOf(xs[i], '\n') >= 0) {
return false;
}
}
return true;
}
function getIndent(opts, depth) {
var baseIndent;
if (opts.indent === '\t') {
baseIndent = '\t';
} else if (typeof opts.indent === 'number' && opts.indent > 0) {
baseIndent = Array(opts.indent + 1).join(' ');
} else {
return null;
}
return {
base: baseIndent,
prev: Array(depth + 1).join(baseIndent)
};
}
function indentedJoin(xs, indent) {
if (xs.length === 0) { return ''; }
var lineJoiner = '\n' + indent.prev + indent.base;
return lineJoiner + xs.join(',' + lineJoiner) + '\n' + indent.prev;
}
function arrObjKeys(obj, inspect) {
var isArr = isArray(obj);
var xs = [];
if (isArr) {
xs.length = obj.length;
for (var i = 0; i < obj.length; i++) {
xs[i] = has(obj, i) ? inspect(obj[i], obj) : '';
}
}
var syms = typeof gOPS === 'function' ? gOPS(obj) : [];
var symMap;
if (hasShammedSymbols) {
symMap = {};
for (var k = 0; k < syms.length; k++) {
symMap['$' + syms[k]] = syms[k];
}
}
for (var key in obj) { // eslint-disable-line no-restricted-syntax
if (!has(obj, key)) { continue; } // eslint-disable-line no-restricted-syntax, no-continue
if (isArr && String(Number(key)) === key && key < obj.length) { continue; } // eslint-disable-line no-restricted-syntax, no-continue
if (hasShammedSymbols && symMap['$' + key] instanceof Symbol) {
// this is to prevent shammed Symbols, which are stored as strings, from being included in the string key section
continue; // eslint-disable-line no-restricted-syntax, no-continue
} else if ((/[^\w$]/).test(key)) {
xs.push(inspect(key, obj) + ': ' + inspect(obj[key], obj));
} else {
xs.push(key + ': ' + inspect(obj[key], obj));
}
}
if (typeof gOPS === 'function') {
for (var j = 0; j < syms.length; j++) {
if (isEnumerable.call(obj, syms[j])) {
xs.push('[' + inspect(syms[j]) + ']: ' + inspect(obj[syms[j]], obj));
}
}
}
return xs;
}
/***/ }),
/***/ 7265:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
module.exports = __nccwpck_require__(3837).inspect;
/***/ }),
/***/ 1223:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var wrappy = __nccwpck_require__(2940)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)
once.proto = once(function () {
Object.defineProperty(Function.prototype, 'once', {
value: function () {
return once(this)
},
configurable: true
})
Object.defineProperty(Function.prototype, 'onceStrict', {
value: function () {
return onceStrict(this)
},
configurable: true
})
})
function once (fn) {
var f = function () {
if (f.called) return f.value
f.called = true
return f.value = fn.apply(this, arguments)
}
f.called = false
return f
}
function onceStrict (fn) {
var f = function () {
if (f.called)
throw new Error(f.onceError)
f.called = true
return f.value = fn.apply(this, arguments)
}
var name = fn.name || 'Function wrapped with `once`'
f.onceError = name + " shouldn't be called more than once"
f.called = false
return f
}
/***/ }),
/***/ 4907:
/***/ ((module) => {
"use strict";
var replace = String.prototype.replace;
var percentTwenties = /%20/g;
var Format = {
RFC1738: 'RFC1738',
RFC3986: 'RFC3986'
};
module.exports = {
'default': Format.RFC3986,
formatters: {
RFC1738: function (value) {
return replace.call(value, percentTwenties, '+');
},
RFC3986: function (value) {
return String(value);
}
},
RFC1738: Format.RFC1738,
RFC3986: Format.RFC3986
};
/***/ }),
/***/ 2760:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var stringify = __nccwpck_require__(9954);
var parse = __nccwpck_require__(3912);
var formats = __nccwpck_require__(4907);
module.exports = {
formats: formats,
parse: parse,
stringify: stringify
};
/***/ }),
/***/ 3912:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var utils = __nccwpck_require__(2360);
var has = Object.prototype.hasOwnProperty;
var isArray = Array.isArray;
var defaults = {
allowDots: false,
allowPrototypes: false,
allowSparse: false,
arrayLimit: 20,
charset: 'utf-8',
charsetSentinel: false,
comma: false,
decoder: utils.decode,
delimiter: '&',
depth: 5,
ignoreQueryPrefix: false,
interpretNumericEntities: false,
parameterLimit: 1000,
parseArrays: true,
plainObjects: false,
strictNullHandling: false
};
var interpretNumericEntities = function (str) {
return str.replace(/&#(\d+);/g, function ($0, numberStr) {
return String.fromCharCode(parseInt(numberStr, 10));
});
};
var parseArrayValue = function (val, options) {
if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) {
return val.split(',');
}
return val;
};
// This is what browsers will submit when the ✓ character occurs in an
// application/x-www-form-urlencoded body and the encoding of the page containing
// the form is iso-8859-1, or when the submitted form has an accept-charset
// attribute of iso-8859-1. Presumably also with other charsets that do not contain
// the ✓ character, such as us-ascii.
var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('&#10003;')
// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded.
var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓')
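// Illustrative note: when options.charsetSentinel is enabled, parseValues
// below looks for a 'utf8=...' pair anywhere in the query; the checkmark form
// switches decoding to utf-8, the numeric-entity form to iso-8859-1, and the
// sentinel pair itself is skipped via skipIndex.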
var parseValues = function parseQueryStringValues(str, options) {
var obj = {};
var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str;
var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit;
var parts = cleanStr.split(options.delimiter, limit);
var skipIndex = -1; // Keep track of where the utf8 sentinel was found
var i;
var charset = options.charset;
if (options.charsetSentinel) {
for (i = 0; i < parts.length; ++i) {
if (parts[i].indexOf('utf8=') === 0) {
if (parts[i] === charsetSentinel) {
charset = 'utf-8';
} else if (parts[i] === isoSentinel) {
charset = 'iso-8859-1';
}
skipIndex = i;
i = parts.length; // The eslint settings do not allow break;
}
}
}
for (i = 0; i < parts.length; ++i) {
if (i === skipIndex) {
continue;
}
var part = parts[i];
var bracketEqualsPos = part.indexOf(']=');
var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1;
var key, val;
if (pos === -1) {
key = options.decoder(part, defaults.decoder, charset, 'key');
val = options.strictNullHandling ? null : '';
} else {
key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key');
val = utils.maybeMap(
parseArrayValue(part.slice(pos + 1), options),
function (encodedVal) {
return options.decoder(encodedVal, defaults.decoder, charset, 'value');
}
);
}
if (val && options.interpretNumericEntities && charset === 'iso-8859-1') {
val = interpretNumericEntities(val);
}
if (part.indexOf('[]=') > -1) {
val = isArray(val) ? [val] : val;
}
if (has.call(obj, key)) {
obj[key] = utils.combine(obj[key], val);
} else {
obj[key] = val;
}
}
return obj;
};
var parseObject = function (chain, val, options, valuesParsed) {
var leaf = valuesParsed ? val : parseArrayValue(val, options);
for (var i = chain.length - 1; i >= 0; --i) {
var obj;
var root = chain[i];
if (root === '[]' && options.parseArrays) {
obj = [].concat(leaf);
} else {
obj = options.plainObjects ? Object.create(null) : {};
var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root;
var index = parseInt(cleanRoot, 10);
if (!options.parseArrays && cleanRoot === '') {
obj = { 0: leaf };
} else if (
!isNaN(index)
&& root !== cleanRoot
&& String(index) === cleanRoot
&& index >= 0
&& (options.parseArrays && index <= options.arrayLimit)
) {
obj = [];
obj[index] = leaf;
} else if (cleanRoot !== '__proto__') {
obj[cleanRoot] = leaf;
}
}
leaf = obj;
}
return leaf;
};
var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) {
if (!givenKey) {
return;
}
// Transform dot notation to bracket notation
var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey;
// The regex chunks
var brackets = /(\[[^[\]]*])/;
var child = /(\[[^[\]]*])/g;
// Get the parent
var segment = options.depth > 0 && brackets.exec(key);
var parent = segment ? key.slice(0, segment.index) : key;
// Stash the parent if it exists
var keys = [];
if (parent) {
// If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties
if (!options.plainObjects && has.call(Object.prototype, parent)) {
if (!options.allowPrototypes) {
return;
}
}
keys.push(parent);
}
// Loop through children appending to the array until we hit depth
var i = 0;
while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) {
i += 1;
if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) {
if (!options.allowPrototypes) {
return;
}
}
keys.push(segment[1]);
}
// If there's a remainder, just add whatever is left
if (segment) {
keys.push('[' + key.slice(segment.index) + ']');
}
return parseObject(keys, val, options, valuesParsed);
};
var normalizeParseOptions = function normalizeParseOptions(opts) {
if (!opts) {
return defaults;
}
if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') {
throw new TypeError('Decoder has to be a function.');
}
if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') {
throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined');
}
var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset;
return {
allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots,
allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes,
allowSparse: typeof opts.allowSparse === 'boolean' ? opts.allowSparse : defaults.allowSparse,
arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit,
charset: charset,
charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel,
comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma,
decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder,
delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter,
// eslint-disable-next-line no-implicit-coercion, no-extra-parens
depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth,
ignoreQueryPrefix: opts.ignoreQueryPrefix === true,
interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities,
parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit,
parseArrays: opts.parseArrays !== false,
plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects,
strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling
};
};
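// Hedged usage sketch for the exported parser: parse('a[b]=c&d=1') would
// yield { a: { b: 'c' }, d: '1' }, and parse('x[]=1&x[]=2') would yield
// { x: ['1', '2'] } under the default options above.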
module.exports = function (str, opts) {
var options = normalizeParseOptions(opts);
if (str === '' || str === null || typeof str === 'undefined') {
return options.plainObjects ? Object.create(null) : {};
}
var tempObj = typeof str === 'string' ? parseValues(str, options) : str;
var obj = options.plainObjects ? Object.create(null) : {};
// Iterate over the keys and setup the new object
var keys = Object.keys(tempObj);
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string');
obj = utils.merge(obj, newObj, options);
}
if (options.allowSparse === true) {
return obj;
}
return utils.compact(obj);
};
/***/ }),
/***/ 9954:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var getSideChannel = __nccwpck_require__(4334);
var utils = __nccwpck_require__(2360);
var formats = __nccwpck_require__(4907);
var has = Object.prototype.hasOwnProperty;
var arrayPrefixGenerators = {
brackets: function brackets(prefix) {
return prefix + '[]';
},
comma: 'comma',
indices: function indices(prefix, key) {
return prefix + '[' + key + ']';
},
repeat: function repeat(prefix) {
return prefix;
}
};
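// The generators above shape array keys, e.g. for key 'a': brackets -> 'a[]',
// indices -> 'a[0]', 'a[1]', ..., repeat -> 'a' for every element, while
// 'comma' is handled specially and joins the values into one pair.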
var isArray = Array.isArray;
var split = String.prototype.split;
var push = Array.prototype.push;
var pushToArray = function (arr, valueOrArray) {
push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]);
};
var toISO = Date.prototype.toISOString;
var defaultFormat = formats['default'];
var defaults = {
addQueryPrefix: false,
allowDots: false,
charset: 'utf-8',
charsetSentinel: false,
delimiter: '&',
encode: true,
encoder: utils.encode,
encodeValuesOnly: false,
format: defaultFormat,
formatter: formats.formatters[defaultFormat],
// deprecated
indices: false,
serializeDate: function serializeDate(date) {
return toISO.call(date);
},
skipNulls: false,
strictNullHandling: false
};
var isNonNullishPrimitive = function isNonNullishPrimitive(v) {
return typeof v === 'string'
|| typeof v === 'number'
|| typeof v === 'boolean'
|| typeof v === 'symbol'
|| typeof v === 'bigint';
};
var sentinel = {};
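// stringify below walks the object recursively; the sideChannel chain records
// the step at which each object was last visited so that a true cycle is
// detected and reported as RangeError('Cyclic object value').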
var stringify = function stringify(
object,
prefix,
generateArrayPrefix,
commaRoundTrip,
strictNullHandling,
skipNulls,
encoder,
filter,
sort,
allowDots,
serializeDate,
format,
formatter,
encodeValuesOnly,
charset,
sideChannel
) {
var obj = object;
var tmpSc = sideChannel;
var step = 0;
var findFlag = false;
while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) {
// Where object last appeared in the ref tree
var pos = tmpSc.get(object);
step += 1;
if (typeof pos !== 'undefined') {
if (pos === step) {
throw new RangeError('Cyclic object value');
} else {
findFlag = true; // Break while
}
}
if (typeof tmpSc.get(sentinel) === 'undefined') {
step = 0;
}
}
if (typeof filter === 'function') {
obj = filter(prefix, obj);
} else if (obj instanceof Date) {
obj = serializeDate(obj);
} else if (generateArrayPrefix === 'comma' && isArray(obj)) {
obj = utils.maybeMap(obj, function (value) {
if (value instanceof Date) {
return serializeDate(value);
}
return value;
});
}
if (obj === null) {
if (strictNullHandling) {
return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key', format) : prefix;
}
obj = '';
}
if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) {
if (encoder) {
var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key', format);
if (generateArrayPrefix === 'comma' && encodeValuesOnly) {
var valuesArray = split.call(String(obj), ',');
var valuesJoined = '';
for (var i = 0; i < valuesArray.length; ++i) {
valuesJoined += (i === 0 ? '' : ',') + formatter(encoder(valuesArray[i], defaults.encoder, charset, 'value', format));
}
return [formatter(keyValue) + (commaRoundTrip && isArray(obj) && valuesArray.length === 1 ? '[]' : '') + '=' + valuesJoined];
}
return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))];
}
return [formatter(prefix) + '=' + formatter(String(obj))];
}
var values = [];
if (typeof obj === 'undefined') {
return values;
}
var objKeys;
if (generateArrayPrefix === 'comma' && isArray(obj)) {
// we need to join elements in
objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }];
} else if (isArray(filter)) {
objKeys = filter;
} else {
var keys = Object.keys(obj);
objKeys = sort ? keys.sort(sort) : keys;
}
var adjustedPrefix = commaRoundTrip && isArray(obj) && obj.length === 1 ? prefix + '[]' : prefix;
for (var j = 0; j < objKeys.length; ++j) {
var key = objKeys[j];
var value = typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key];
if (skipNulls && value === null) {
continue;
}
var keyPrefix = isArray(obj)
? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(adjustedPrefix, key) : adjustedPrefix
: adjustedPrefix + (allowDots ? '.' + key : '[' + key + ']');
sideChannel.set(object, step);
var valueSideChannel = getSideChannel();
valueSideChannel.set(sentinel, sideChannel);
pushToArray(values, stringify(
value,
keyPrefix,
generateArrayPrefix,
commaRoundTrip,
strictNullHandling,
skipNulls,
encoder,
filter,
sort,
allowDots,
serializeDate,
format,
formatter,
encodeValuesOnly,
charset,
valueSideChannel
));
}
return values;
};
var normalizeStringifyOptions = function normalizeStringifyOptions(opts) {
if (!opts) {
return defaults;
}
if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') {
throw new TypeError('Encoder has to be a function.');
}
var charset = opts.charset || defaults.charset;
if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') {
throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined');
}
var format = formats['default'];
if (typeof opts.format !== 'undefined') {
if (!has.call(formats.formatters, opts.format)) {
throw new TypeError('Unknown format option provided.');
}
format = opts.format;
}
var formatter = formats.formatters[format];
var filter = defaults.filter;
if (typeof opts.filter === 'function' || isArray(opts.filter)) {
filter = opts.filter;
}
return {
addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix,
allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots,
charset: charset,
charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel,
delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter,
encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode,
encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder,
encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly,
filter: filter,
format: format,
formatter: formatter,
serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate,
skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls,
sort: typeof opts.sort === 'function' ? opts.sort : null,
strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling
};
};
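// Hedged usage sketch for the exported serializer: stringify({ a: { b: 'c' } })
// would produce 'a%5Bb%5D=c', and stringify({ a: ['1', '2'] },
// { arrayFormat: 'brackets' }) would produce 'a%5B%5D=1&a%5B%5D=2' with the
// default encoder.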
module.exports = function (object, opts) {
var obj = object;
var options = normalizeStringifyOptions(opts);
var objKeys;
var filter;
if (typeof options.filter === 'function') {
filter = options.filter;
obj = filter('', obj);
} else if (isArray(options.filter)) {
filter = options.filter;
objKeys = filter;
}
var keys = [];
if (typeof obj !== 'object' || obj === null) {
return '';
}
var arrayFormat;
if (opts && opts.arrayFormat in arrayPrefixGenerators) {
arrayFormat = opts.arrayFormat;
} else if (opts && 'indices' in opts) {
arrayFormat = opts.indices ? 'indices' : 'repeat';
} else {
arrayFormat = 'indices';
}
var generateArrayPrefix = arrayPrefixGenerators[arrayFormat];
if (opts && 'commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') {
throw new TypeError('`commaRoundTrip` must be a boolean, or absent');
}
var commaRoundTrip = generateArrayPrefix === 'comma' && opts && opts.commaRoundTrip;
if (!objKeys) {
objKeys = Object.keys(obj);
}
if (options.sort) {
objKeys.sort(options.sort);
}
var sideChannel = getSideChannel();
for (var i = 0; i < objKeys.length; ++i) {
var key = objKeys[i];
if (options.skipNulls && obj[key] === null) {
continue;
}
pushToArray(keys, stringify(
obj[key],
key,
generateArrayPrefix,
commaRoundTrip,
options.strictNullHandling,
options.skipNulls,
options.encode ? options.encoder : null,
options.filter,
options.sort,
options.allowDots,
options.serializeDate,
options.format,
options.formatter,
options.encodeValuesOnly,
options.charset,
sideChannel
));
}
var joined = keys.join(options.delimiter);
var prefix = options.addQueryPrefix === true ? '?' : '';
if (options.charsetSentinel) {
if (options.charset === 'iso-8859-1') {
// encodeURIComponent('&#10003;'), the "numeric entity" representation of a checkmark
prefix += 'utf8=%26%2310003%3B&';
} else {
// encodeURIComponent('✓')
prefix += 'utf8=%E2%9C%93&';
}
}
return joined.length > 0 ? prefix + joined : '';
};
/***/ }),
/***/ 2360:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var formats = __nccwpck_require__(4907);
var has = Object.prototype.hasOwnProperty;
var isArray = Array.isArray;
var hexTable = (function () {
var array = [];
for (var i = 0; i < 256; ++i) {
array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase());
}
return array;
}());
var compactQueue = function compactQueue(queue) {
while (queue.length > 1) {
var item = queue.pop();
var obj = item.obj[item.prop];
if (isArray(obj)) {
var compacted = [];
for (var j = 0; j < obj.length; ++j) {
if (typeof obj[j] !== 'undefined') {
compacted.push(obj[j]);
}
}
item.obj[item.prop] = compacted;
}
}
};
var arrayToObject = function arrayToObject(source, options) {
var obj = options && options.plainObjects ? Object.create(null) : {};
for (var i = 0; i < source.length; ++i) {
if (typeof source[i] !== 'undefined') {
obj[i] = source[i];
}
}
return obj;
};
var merge = function merge(target, source, options) {
/* eslint no-param-reassign: 0 */
if (!source) {
return target;
}
if (typeof source !== 'object') {
if (isArray(target)) {
target.push(source);
} else if (target && typeof target === 'object') {
if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) {
target[source] = true;
}
} else {
return [target, source];
}
return target;
}
if (!target || typeof target !== 'object') {
return [target].concat(source);
}
var mergeTarget = target;
if (isArray(target) && !isArray(source)) {
mergeTarget = arrayToObject(target, options);
}
if (isArray(target) && isArray(source)) {
source.forEach(function (item, i) {
if (has.call(target, i)) {
var targetItem = target[i];
if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') {
target[i] = merge(targetItem, item, options);
} else {
target.push(item);
}
} else {
target[i] = item;
}
});
return target;
}
return Object.keys(source).reduce(function (acc, key) {
var value = source[key];
if (has.call(acc, key)) {
acc[key] = merge(acc[key], value, options);
} else {
acc[key] = value;
}
return acc;
}, mergeTarget);
};
var assign = function assignSingleSource(target, source) {
return Object.keys(source).reduce(function (acc, key) {
acc[key] = source[key];
return acc;
}, target);
};
var decode = function (str, decoder, charset) {
var strWithoutPlus = str.replace(/\+/g, ' ');
if (charset === 'iso-8859-1') {
// unescape never throws, no try...catch needed:
return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape);
}
// utf-8
try {
return decodeURIComponent(strWithoutPlus);
} catch (e) {
return strWithoutPlus;
}
};
var encode = function encode(str, defaultEncoder, charset, kind, format) {
// This code was originally written by Brian White (mscdex) for the io.js core querystring library.
// It has been adapted here for stricter adherence to RFC 3986
if (str.length === 0) {
return str;
}
var string = str;
if (typeof str === 'symbol') {
string = Symbol.prototype.toString.call(str);
} else if (typeof str !== 'string') {
string = String(str);
}
if (charset === 'iso-8859-1') {
return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) {
return '%26%23' + parseInt($0.slice(2), 16) + '%3B';
});
}
var out = '';
for (var i = 0; i < string.length; ++i) {
var c = string.charCodeAt(i);
if (
c === 0x2D // -
|| c === 0x2E // .
|| c === 0x5F // _
|| c === 0x7E // ~
|| (c >= 0x30 && c <= 0x39) // 0-9
|| (c >= 0x41 && c <= 0x5A) // A-Z
|| (c >= 0x61 && c <= 0x7A) // a-z
|| (format === formats.RFC1738 && (c === 0x28 || c === 0x29)) // ( )
) {
out += string.charAt(i);
continue;
}
if (c < 0x80) {
out = out + hexTable[c];
continue;
}
if (c < 0x800) {
out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]);
continue;
}
if (c < 0xD800 || c >= 0xE000) {
out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]);
continue;
}
i += 1;
c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF));
/* eslint operator-linebreak: [2, "before"] */
out += hexTable[0xF0 | (c >> 18)]
+ hexTable[0x80 | ((c >> 12) & 0x3F)]
+ hexTable[0x80 | ((c >> 6) & 0x3F)]
+ hexTable[0x80 | (c & 0x3F)];
}
return out;
};
var compact = function compact(value) {
var queue = [{ obj: { o: value }, prop: 'o' }];
var refs = [];
for (var i = 0; i < queue.length; ++i) {
var item = queue[i];
var obj = item.obj[item.prop];
var keys = Object.keys(obj);
for (var j = 0; j < keys.length; ++j) {
var key = keys[j];
var val = obj[key];
if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) {
queue.push({ obj: obj, prop: key });
refs.push(val);
}
}
}
compactQueue(queue);
return value;
};
var isRegExp = function isRegExp(obj) {
return Object.prototype.toString.call(obj) === '[object RegExp]';
};
var isBuffer = function isBuffer(obj) {
if (!obj || typeof obj !== 'object') {
return false;
}
return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj));
};
var combine = function combine(a, b) {
return [].concat(a, b);
};
var maybeMap = function maybeMap(val, fn) {
if (isArray(val)) {
var mapped = [];
for (var i = 0; i < val.length; i += 1) {
mapped.push(fn(val[i]));
}
return mapped;
}
return fn(val);
};
module.exports = {
arrayToObject: arrayToObject,
assign: assign,
combine: combine,
compact: compact,
decode: decode,
encode: encode,
isBuffer: isBuffer,
isRegExp: isRegExp,
maybeMap: maybeMap,
merge: merge
};
/***/ }),
/***/ 4334:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
var GetIntrinsic = __nccwpck_require__(4538);
var callBound = __nccwpck_require__(8803);
var inspect = __nccwpck_require__(504);
var $TypeError = GetIntrinsic('%TypeError%');
var $WeakMap = GetIntrinsic('%WeakMap%', true);
var $Map = GetIntrinsic('%Map%', true);
var $weakMapGet = callBound('WeakMap.prototype.get', true);
var $weakMapSet = callBound('WeakMap.prototype.set', true);
var $weakMapHas = callBound('WeakMap.prototype.has', true);
var $mapGet = callBound('Map.prototype.get', true);
var $mapSet = callBound('Map.prototype.set', true);
var $mapHas = callBound('Map.prototype.has', true);
/*
* This function traverses the list returning the node corresponding to the
* given key.
*
* That node is also moved to the head of the list, so that if it's accessed
* again we don't need to traverse the whole list. By doing so, all the recently
* used nodes can be accessed relatively quickly.
*/
var listGetNode = function (list, key) { // eslint-disable-line consistent-return
for (var prev = list, curr; (curr = prev.next) !== null; prev = curr) {
if (curr.key === key) {
prev.next = curr.next;
curr.next = list.next;
list.next = curr; // eslint-disable-line no-param-reassign
return curr;
}
}
};
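// Example of the move-to-front behaviour above: once listGetNode finds a key,
// its node is re-linked directly after the list head, so repeated lookups of
// recently used keys avoid traversing the whole list.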
var listGet = function (objects, key) {
var node = listGetNode(objects, key);
return node && node.value;
};
var listSet = function (objects, key, value) {
var node = listGetNode(objects, key);
if (node) {
node.value = value;
} else {
// Prepend the new node to the beginning of the list
objects.next = { // eslint-disable-line no-param-reassign
key: key,
next: objects.next,
value: value
};
}
};
var listHas = function (objects, key) {
return !!listGetNode(objects, key);
};
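// getSideChannel below picks the strongest available store: a WeakMap for
// object/function keys when WeakMap exists, otherwise a Map, otherwise the
// linked list defined above.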
module.exports = function getSideChannel() {
var $wm;
var $m;
var $o;
var channel = {
assert: function (key) {
if (!channel.has(key)) {
throw new $TypeError('Side channel does not contain ' + inspect(key));
}
},
get: function (key) { // eslint-disable-line consistent-return
if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) {
if ($wm) {
return $weakMapGet($wm, key);
}
} else if ($Map) {
if ($m) {
return $mapGet($m, key);
}
} else {
if ($o) { // eslint-disable-line no-lonely-if
return listGet($o, key);
}
}
},
has: function (key) {
if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) {
if ($wm) {
return $weakMapHas($wm, key);
}
} else if ($Map) {
if ($m) {
return $mapHas($m, key);
}
} else {
if ($o) { // eslint-disable-line no-lonely-if
return listHas($o, key);
}
}
return false;
},
set: function (key, value) {
if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) {
if (!$wm) {
$wm = new $WeakMap();
}
$weakMapSet($wm, key, value);
} else if ($Map) {
if (!$m) {
$m = new $Map();
}
$mapSet($m, key, value);
} else {
if (!$o) {
/*
* Initialize the linked list as an empty node, so that we don't have
* to special-case handling of the first node: we can always refer to
* it as (previous node).next, instead of something like (list).head
*/
$o = { key: {}, next: null };
}
listSet($o, key, value);
}
}
};
return channel;
};
/***/ }),
/***/ 4294:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
module.exports = __nccwpck_require__(4219);
/***/ }),
/***/ 4219:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
var net = __nccwpck_require__(1808);
var tls = __nccwpck_require__(4404);
var http = __nccwpck_require__(3685);
var https = __nccwpck_require__(5687);
var events = __nccwpck_require__(2361);
var assert = __nccwpck_require__(9491);
var util = __nccwpck_require__(3837);
exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;
function httpOverHttp(options) {
var agent = new TunnelingAgent(options);
agent.request = http.request;
return agent;
}
function httpsOverHttp(options) {
var agent = new TunnelingAgent(options);
agent.request = http.request;
agent.createSocket = createSecureSocket;
agent.defaultPort = 443;
return agent;
}
function httpOverHttps(options) {
var agent = new TunnelingAgent(options);
agent.request = https.request;
return agent;
}
function httpsOverHttps(options) {
var agent = new TunnelingAgent(options);
agent.request = https.request;
agent.createSocket = createSecureSocket;
agent.defaultPort = 443;
return agent;
}
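// TunnelingAgent below implements HTTP CONNECT tunneling: it queues requests
// beyond maxSockets, issues a CONNECT request to the proxy for each new
// socket, and (for the https* variants) wraps the established socket with TLS
// in createSecureSocket.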
function TunnelingAgent(options) {
var self = this;
self.options = options || {};
self.proxyOptions = self.options.proxy || {};
self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
self.requests = [];
self.sockets = [];
self.on('free', function onFree(socket, host, port, localAddress) {
var options = toOptions(host, port, localAddress);
for (var i = 0, len = self.requests.length; i < len; ++i) {
var pending = self.requests[i];
if (pending.host === options.host && pending.port === options.port) {
// Detect the request to connect same origin server,
// reuse the connection.
self.requests.splice(i, 1);
pending.request.onSocket(socket);
return;
}
}
socket.destroy();
self.removeSocket(socket);
});
}
util.inherits(TunnelingAgent, events.EventEmitter);
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
var self = this;
var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
if (self.sockets.length >= this.maxSockets) {
// We are over limit so we'll add it to the queue.
self.requests.push(options);
return;
}
// If we are under maxSockets create a new one.
self.createSocket(options, function(socket) {
socket.on('free', onFree);
socket.on('close', onCloseOrRemove);
socket.on('agentRemove', onCloseOrRemove);
req.onSocket(socket);
function onFree() {
self.emit('free', socket, options);
}
function onCloseOrRemove(err) {
self.removeSocket(socket);
socket.removeListener('free', onFree);
socket.removeListener('close', onCloseOrRemove);
socket.removeListener('agentRemove', onCloseOrRemove);
}
});
};
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
var self = this;
var placeholder = {};
self.sockets.push(placeholder);
var connectOptions = mergeOptions({}, self.proxyOptions, {
method: 'CONNECT',
path: options.host + ':' + options.port,
agent: false,
headers: {
host: options.host + ':' + options.port
}
});
if (options.localAddress) {
connectOptions.localAddress = options.localAddress;
}
if (connectOptions.proxyAuth) {
connectOptions.headers = connectOptions.headers || {};
connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
Buffer.from(connectOptions.proxyAuth).toString('base64');
}
debug('making CONNECT request');
var connectReq = self.request(connectOptions);
connectReq.useChunkedEncodingByDefault = false; // for v0.6
connectReq.once('response', onResponse); // for v0.6
connectReq.once('upgrade', onUpgrade); // for v0.6
connectReq.once('connect', onConnect); // for v0.7 or later
connectReq.once('error', onError);
connectReq.end();
function onResponse(res) {
// Very hacky. This is necessary to avoid http-parser leaks.
res.upgrade = true;
}
function onUpgrade(res, socket, head) {
// Hacky.
process.nextTick(function() {
onConnect(res, socket, head);
});
}
function onConnect(res, socket, head) {
connectReq.removeAllListeners();
socket.removeAllListeners();
if (res.statusCode !== 200) {
debug('tunneling socket could not be established, statusCode=%d',
res.statusCode);
socket.destroy();
var error = new Error('tunneling socket could not be established, ' +
'statusCode=' + res.statusCode);
error.code = 'ECONNRESET';
options.request.emit('error', error);
self.removeSocket(placeholder);
return;
}
if (head.length > 0) {
debug('got illegal response body from proxy');
socket.destroy();
var error = new Error('got illegal response body from proxy');
error.code = 'ECONNRESET';
options.request.emit('error', error);
self.removeSocket(placeholder);
return;
}
debug('tunneling connection has been established');
self.sockets[self.sockets.indexOf(placeholder)] = socket;
return cb(socket);
}
function onError(cause) {
connectReq.removeAllListeners();
debug('tunneling socket could not be established, cause=%s\n',
cause.message, cause.stack);
var error = new Error('tunneling socket could not be established, ' +
'cause=' + cause.message);
error.code = 'ECONNRESET';
options.request.emit('error', error);
self.removeSocket(placeholder);
}
};
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
var pos = this.sockets.indexOf(socket)
if (pos === -1) {
return;
}
this.sockets.splice(pos, 1);
var pending = this.requests.shift();
if (pending) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this.createSocket(pending, function(socket) {
pending.request.onSocket(socket);
});
}
};
function createSecureSocket(options, cb) {
var self = this;
TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
var hostHeader = options.request.getHeader('host');
var tlsOptions = mergeOptions({}, self.options, {
socket: socket,
servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
});
// 0 is dummy port for v0.6
var secureSocket = tls.connect(0, tlsOptions);
self.sockets[self.sockets.indexOf(socket)] = secureSocket;
cb(secureSocket);
});
}
function toOptions(host, port, localAddress) {
if (typeof host === 'string') { // since v0.10
return {
host: host,
port: port,
localAddress: localAddress
};
}
return host; // for v0.11 or later
}
function mergeOptions(target) {
for (var i = 1, len = arguments.length; i < len; ++i) {
var overrides = arguments[i];
if (typeof overrides === 'object') {
var keys = Object.keys(overrides);
for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
var k = keys[j];
if (overrides[k] !== undefined) {
target[k] = overrides[k];
}
}
}
}
return target;
}
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
debug = function() {
var args = Array.prototype.slice.call(arguments);
if (typeof args[0] === 'string') {
args[0] = 'TUNNEL: ' + args[0];
} else {
args.unshift('TUNNEL:');
}
console.error.apply(console, args);
}
} else {
debug = function() {};
}
exports.debug = debug; // for test
/***/ }),
/***/ 5538:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const url = __nccwpck_require__(7310);
const http = __nccwpck_require__(3685);
const https = __nccwpck_require__(5687);
const util = __nccwpck_require__(9470);
let fs;
let tunnel;
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
const NetworkRetryErrors = ['ECONNRESET', 'ENOTFOUND', 'ESOCKETTIMEDOUT', 'ETIMEDOUT', 'ECONNREFUSED'];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
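// Hedged note: these constants are conventionally combined as a capped
// exponential backoff, i.e. a retry waits roughly
// ExponentialBackoffTimeSlice * 2 ** min(retryNumber, ExponentialBackoffCeiling)
// milliseconds; the retry logic itself appears later in this client.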
class HttpClientResponse {
constructor(message) {
this.message = message;
}
readBody() {
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
let buffer = Buffer.alloc(0);
const encodingCharset = util.obtainContentCharset(this);
// Extract Encoding from header: 'content-encoding'
// Match `gzip`, `gzip, deflate` variations of GZIP encoding
const contentEncoding = this.message.headers['content-encoding'] || '';
const isGzippedEncoded = new RegExp('(gzip$)|(gzip, *deflate)').test(contentEncoding);
this.message.on('data', function (data) {
const chunk = (typeof data === 'string') ? Buffer.from(data, encodingCharset) : data;
buffer = Buffer.concat([buffer, chunk]);
}).on('end', function () {
return __awaiter(this, void 0, void 0, function* () {
if (isGzippedEncoded) { // Process GZipped Response Body HERE
const gunzippedBody = yield util.decompressGzippedContent(buffer, encodingCharset);
resolve(gunzippedBody);
}
else {
resolve(buffer.toString(encodingCharset));
}
});
}).on('error', function (err) {
reject(err);
});
}));
}
}
exports.HttpClientResponse = HttpClientResponse;
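// Illustrative sketch (hypothetical helper, never called by the bundle): readBody above treats a
// body as gzipped only when the content-encoding header ends with 'gzip' or has a
// 'gzip, deflate' style value, per the regex it builds.
function exampleIsGzipEncoded(contentEncoding) {
    // exampleIsGzipEncoded('gzip') === true
    // exampleIsGzipEncoded('gzip, deflate') === true
    // exampleIsGzipEncoded('br') === false
    return new RegExp('(gzip$)|(gzip, *deflate)').test(contentEncoding || '');
}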
function isHttps(requestUrl) {
let parsedUrl = url.parse(requestUrl);
return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
var EnvironmentVariables;
(function (EnvironmentVariables) {
EnvironmentVariables["HTTP_PROXY"] = "HTTP_PROXY";
EnvironmentVariables["HTTPS_PROXY"] = "HTTPS_PROXY";
EnvironmentVariables["NO_PROXY"] = "NO_PROXY";
})(EnvironmentVariables || (EnvironmentVariables = {}));
class HttpClient {
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
let no_proxy = process.env[EnvironmentVariables.NO_PROXY];
if (no_proxy) {
this._httpProxyBypassHosts = [];
no_proxy.split(',').forEach(bypass => {
this._httpProxyBypassHosts.push(util.buildProxyBypassRegexFromEnv(bypass));
});
}
this.requestOptions = requestOptions;
if (requestOptions) {
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
this._httpProxy = requestOptions.proxy;
if (requestOptions.proxy && requestOptions.proxy.proxyBypassHosts) {
this._httpProxyBypassHosts = [];
requestOptions.proxy.proxyBypassHosts.forEach(bypass => {
this._httpProxyBypassHosts.push(new RegExp(bypass, 'i'));
});
}
this._certConfig = requestOptions.cert;
if (this._certConfig) {
// If using cert, need fs
fs = __nccwpck_require__(7147);
// cache the cert content into memory, so we don't have to read it from disk every time
if (this._certConfig.caFile && fs.existsSync(this._certConfig.caFile)) {
this._ca = fs.readFileSync(this._certConfig.caFile, 'utf8');
}
if (this._certConfig.certFile && fs.existsSync(this._certConfig.certFile)) {
this._cert = fs.readFileSync(this._certConfig.certFile, 'utf8');
}
if (this._certConfig.keyFile && fs.existsSync(this._certConfig.keyFile)) {
this._key = fs.readFileSync(this._certConfig.keyFile, 'utf8');
}
}
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
}
get(requestUrl, additionalHeaders) {
return this.request('GET', requestUrl, null, additionalHeaders || {});
}
del(requestUrl, additionalHeaders) {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
}
post(requestUrl, data, additionalHeaders) {
return this.request('POST', requestUrl, data, additionalHeaders || {});
}
patch(requestUrl, data, additionalHeaders) {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
}
put(requestUrl, data, additionalHeaders) {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
}
head(requestUrl, additionalHeaders) {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
}
sendStream(verb, requestUrl, stream, additionalHeaders) {
return this.request(verb, requestUrl, stream, additionalHeaders);
}
    /**
     * Makes a raw http request.
     * All other verb helpers such as get, post, patch, and del ultimately call this.
     * Prefer get, del, post, and patch.
     */
request(verb, requestUrl, data, headers) {
return __awaiter(this, void 0, void 0, function* () {
if (this._disposed) {
throw new Error("Client has already been disposed.");
}
let parsedUrl = url.parse(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
let numTries = 0;
let response;
while (numTries < maxTries) {
try {
response = yield this.requestRaw(info, data);
}
catch (err) {
numTries++;
if (err && err.code && NetworkRetryErrors.indexOf(err.code) > -1 && numTries < maxTries) {
yield this._performExponentialBackoff(numTries);
continue;
}
throw err;
}
// Check if it's an authentication challenge
if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (let i = 0; i < this.handlers.length; i++) {
if (this.handlers[i].canHandleAuthentication(response)) {
authenticationHandler = this.handlers[i];
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
&& this._allowRedirects
&& redirectsRemaining > 0) {
const redirectUrl = response.message.headers["location"];
if (!redirectUrl) {
                        // if there's no location header to redirect to, stop following redirects
break;
}
let parsedRedirectUrl = url.parse(redirectUrl);
if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
}
                    // we need to finish reading the response before reassigning it,
                    // otherwise the open socket would be leaked
yield response.readBody();
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = yield this.requestRaw(info, data);
redirectsRemaining--;
}
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
yield response.readBody();
yield this._performExponentialBackoff(numTries);
}
}
return response;
});
}
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return new Promise((resolve, reject) => {
let callbackForResult = function (err, res) {
if (err) {
reject(err);
}
resolve(res);
};
this.requestRawWithCallback(info, data, callbackForResult);
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
requestRawWithCallback(info, data, onResult) {
let socket;
if (typeof (data) === 'string') {
info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
let handleResult = (err, res) => {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
};
let req = info.httpModule.request(info.options, (msg) => {
let res = new HttpClientResponse(msg);
handleResult(null, res);
});
req.on('socket', (sock) => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.destroy();
}
handleResult(new Error('Request timeout: ' + info.options.path), null);
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err, null);
});
if (data && typeof (data) === 'string') {
req.write(data, 'utf8');
}
if (data && typeof (data) !== 'string') {
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.timeout = (this.requestOptions && this.requestOptions.socketTimeout) || this._socketTimeout;
this._socketTimeout = info.options.timeout;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers["user-agent"] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers && !this._isPresigned(url.format(requestUrl))) {
this.handlers.forEach((handler) => {
handler.prepareRequest(info.options);
});
}
return info;
}
_isPresigned(requestUrl) {
if (this.requestOptions && this.requestOptions.presignedUrlPatterns) {
const patterns = this.requestOptions.presignedUrlPatterns;
for (let i = 0; i < patterns.length; i++) {
if (requestUrl.match(patterns[i])) {
return true;
}
}
}
return false;
}
_mergeHeaders(headers) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
}
return lowercaseKeys(headers || {});
}
_getAgent(parsedUrl) {
let agent;
let proxy = this._getProxy(parsedUrl);
let useProxy = proxy.proxyUrl && proxy.proxyUrl.hostname && !this._isMatchInBypassProxyList(parsedUrl);
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (!!agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (!!this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
if (useProxy) {
// If using proxy, need tunnel
if (!tunnel) {
tunnel = __nccwpck_require__(4294);
}
const agentOptions = {
maxSockets: maxSockets,
keepAlive: this._keepAlive,
proxy: {
proxyAuth: proxy.proxyAuth,
host: proxy.proxyUrl.hostname,
port: proxy.proxyUrl.port
},
};
let tunnelAgent;
const overHttps = proxy.proxyUrl.protocol === 'https:';
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
        // if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
        // if not using a private agent and a tunnel agent isn't set up, use the global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that would affect requests for the entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
}
if (usingSsl && this._certConfig) {
agent.options = Object.assign(agent.options || {}, { ca: this._ca, cert: this._cert, key: this._key, passphrase: this._certConfig.passphrase });
}
return agent;
}
_getProxy(parsedUrl) {
let usingSsl = parsedUrl.protocol === 'https:';
let proxyConfig = this._httpProxy;
// fallback to http_proxy and https_proxy env
let https_proxy = process.env[EnvironmentVariables.HTTPS_PROXY];
let http_proxy = process.env[EnvironmentVariables.HTTP_PROXY];
if (!proxyConfig) {
if (https_proxy && usingSsl) {
proxyConfig = {
proxyUrl: https_proxy
};
}
else if (http_proxy) {
proxyConfig = {
proxyUrl: http_proxy
};
}
}
let proxyUrl;
let proxyAuth;
if (proxyConfig) {
if (proxyConfig.proxyUrl.length > 0) {
proxyUrl = url.parse(proxyConfig.proxyUrl);
}
if (proxyConfig.proxyUsername || proxyConfig.proxyPassword) {
proxyAuth = proxyConfig.proxyUsername + ":" + proxyConfig.proxyPassword;
}
}
return { proxyUrl: proxyUrl, proxyAuth: proxyAuth };
}
_isMatchInBypassProxyList(parsedUrl) {
if (!this._httpProxyBypassHosts) {
return false;
}
let bypass = false;
this._httpProxyBypassHosts.forEach(bypassHost => {
if (bypassHost.test(parsedUrl.href)) {
bypass = true;
}
});
return bypass;
}
_performExponentialBackoff(retryNumber) {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
}
}
exports.HttpClient = HttpClient;
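// Illustrative usage sketch (hypothetical helper, user agent, and URL; never invoked by the
// bundle). It shows how the request options handled in the constructor above map to behaviour:
// retries only apply to the verbs in RetryableHttpVerbs, and redirects are followed up to maxRedirects.
function exampleCreateRetryingClient() {
    return new HttpClient('example-user-agent', [], {
        allowRetries: true,   // retry OPTIONS/GET/DELETE/HEAD on network errors and 502/503/504
        maxRetries: 3,
        allowRedirects: true,
        maxRedirects: 5,
        socketTimeout: 30000  // ms of socket inactivity before requestRawWithCallback destroys the socket
    });
}
// e.g. const body = await (await exampleCreateRetryingClient().get('https://example.com')).readBody();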
/***/ }),
/***/ 9470:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const qs = __nccwpck_require__(2760);
const url = __nccwpck_require__(7310);
const path = __nccwpck_require__(1017);
const zlib = __nccwpck_require__(9796);
/**
 * Creates a URL from a resource and an optional base url (http://server:8080)
 * @param {string} resource - a fully qualified url or relative path
 * @param {string} baseUrl - an optional baseUrl (http://server:8080)
 * @param {IRequestQueryParams} queryParams - optional query parameters to append to the resulting url
 * @return {string} - resultant url
 */
function getUrl(resource, baseUrl, queryParams) {
const pathApi = path.posix || path;
let requestUrl = '';
if (!baseUrl) {
requestUrl = resource;
}
else if (!resource) {
requestUrl = baseUrl;
}
else {
const base = url.parse(baseUrl);
const resultantUrl = url.parse(resource);
// resource (specific per request) elements take priority
resultantUrl.protocol = resultantUrl.protocol || base.protocol;
resultantUrl.auth = resultantUrl.auth || base.auth;
resultantUrl.host = resultantUrl.host || base.host;
resultantUrl.pathname = pathApi.resolve(base.pathname, resultantUrl.pathname);
if (!resultantUrl.pathname.endsWith('/') && resource.endsWith('/')) {
resultantUrl.pathname += '/';
}
requestUrl = url.format(resultantUrl);
}
return queryParams ?
getUrlWithParsedQueryParams(requestUrl, queryParams) :
requestUrl;
}
exports.getUrl = getUrl;
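// Illustrative sketch (hypothetical helper and host, never called by the bundle): getUrl above
// resolves a relative resource against the base url's path and, when queryParams are given,
// appends them via qs.stringify.
function exampleBuildUrl() {
    // expected result: 'http://server:8080/api/get/users?a=1'
    return getUrl('get/users', 'http://server:8080/api', { params: { a: 1 } });
}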
/**
*
* @param {string} requestUrl
* @param {IRequestQueryParams} queryParams
* @return {string} - Request's URL with Query Parameters appended/parsed.
*/
function getUrlWithParsedQueryParams(requestUrl, queryParams) {
const url = requestUrl.replace(/\?$/g, ''); // Clean any extra end-of-string "?" character
const parsedQueryParams = qs.stringify(queryParams.params, buildParamsStringifyOptions(queryParams));
return `${url}${parsedQueryParams}`;
}
/**
* Build options for QueryParams Stringifying.
*
* @param {IRequestQueryParams} queryParams
* @return {object}
*/
function buildParamsStringifyOptions(queryParams) {
let options = {
addQueryPrefix: true,
delimiter: (queryParams.options || {}).separator || '&',
allowDots: (queryParams.options || {}).shouldAllowDots || false,
arrayFormat: (queryParams.options || {}).arrayFormat || 'repeat',
        encodeValuesOnly: (queryParams.options || {}).shouldOnlyEncodeValues !== false // default to true unless explicitly set to false
};
return options;
}
/**
 * Decompresses/decodes gzip-encoded content
 * using the Node.js built-in zlib module
 *
 * @param {Buffer} buffer
 * @param {string} charset? - optional; defaults to 'utf-8'
 * @return {Promise<string>}
 */
function decompressGzippedContent(buffer, charset) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
zlib.gunzip(buffer, function (error, buffer) {
if (error) {
reject(error);
}
resolve(buffer.toString(charset || 'utf-8'));
});
}));
});
}
exports.decompressGzippedContent = decompressGzippedContent;
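// Illustrative round-trip sketch (hypothetical helper, never called by the bundle): compress a
// string with the zlib module required above, then hand the gzipped buffer to
// decompressGzippedContent, which resolves with the original text.
function exampleGzipRoundTrip(text) {
    return new Promise((resolve, reject) => {
        zlib.gzip(Buffer.from(text, 'utf-8'), (error, gzipped) => {
            if (error) {
                reject(error);
                return;
            }
            resolve(decompressGzippedContent(gzipped, 'utf-8'));
        });
    });
}
// e.g. exampleGzipRoundTrip('hello') resolves with 'hello'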
/**
 * Builds a RegExp, from an entry of the
 * NO_PROXY environment variable, to test urls
 * against when deciding whether to bypass the proxy
 *
 * @param {string} bypass
 * @return {RegExp}
 */
function buildProxyBypassRegexFromEnv(bypass) {
try {
// We need to keep this around for back-compat purposes
return new RegExp(bypass, 'i');
}
catch (err) {
if (err instanceof SyntaxError && (bypass || "").startsWith("*")) {
let wildcardEscaped = bypass.replace('*', '(.*)');
return new RegExp(wildcardEscaped, 'i');
}
throw err;
}
}
exports.buildProxyBypassRegexFromEnv = buildProxyBypassRegexFromEnv;
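// Illustrative sketch (hypothetical helper and hostnames, never called by the bundle): this is how
// a NO_PROXY entry is matched against a request url by HttpClient._isMatchInBypassProxyList.
// A plain entry is used as a case-insensitive regex; a leading '*' (invalid regex syntax) is
// rewritten to '(.*)' by the catch branch above.
function exampleNoProxyMatches(noProxyEntry, requestHref) {
    // exampleNoProxyMatches('*.internal.example', 'https://build.internal.example/') -> true
    return buildProxyBypassRegexFromEnv(noProxyEntry).test(requestHref);
}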
/**
 * Obtains the response's content charset
 * by inspecting the `content-type` response header.
 * Returns 'utf-8' if no charset is specified or matched.
 *
 * @param {IHttpClientResponse} response
 * @return {string} - Content Encoding Charset; Default=utf-8
 */
function obtainContentCharset(response) {
// Find the charset, if specified.
// Search for the `charset=CHARSET` string, not including `;,\r\n`
// Example: content-type: 'application/json;charset=utf-8'
// |__ matches would be ['charset=utf-8', 'utf-8', index: 18, input: 'application/json; charset=utf-8']
// |_____ matches[1] would have the charset :tada: , in our example it's utf-8
    // If the matches array is empty or no charset is found, 'utf-8' is returned by default.
const nodeSupportedEncodings = ['ascii', 'utf8', 'utf16le', 'ucs2', 'base64', 'binary', 'hex'];
const contentType = response.message.headers['content-type'] || '';
const matches = contentType.match(/charset=([^;,\r\n]+)/i);
return (matches && matches[1] && nodeSupportedEncodings.indexOf(matches[1]) != -1) ? matches[1] : 'utf-8';
}
exports.obtainContentCharset = obtainContentCharset;
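// Illustrative sketch (hypothetical helper, never called by the bundle): obtainContentCharset only
// looks at the content-type header, so a minimal stub response is enough to show its behaviour.
function exampleCharsetFor(contentType) {
    // exampleCharsetFor('application/json; charset=utf8') -> 'utf8'
    // exampleCharsetFor('text/plain')                     -> 'utf-8' (the default)
    return obtainContentCharset({ message: { headers: { 'content-type': contentType } } });
}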
/***/ }),
/***/ 5030:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function getUserAgent() {
if (typeof navigator === "object" && "userAgent" in navigator) {
return navigator.userAgent;
}
if (typeof process === "object" && "version" in process) {
return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;
}
return "<environment undetectable>";
}
exports.getUserAgent = getUserAgent;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 2707:
/***/ ((module) => {
/**
* Convert array of 16 byte values to UUID string format of the form:
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
*/
var byteToHex = [];
for (var i = 0; i < 256; ++i) {
byteToHex[i] = (i + 0x100).toString(16).substr(1);
}
function bytesToUuid(buf, offset) {
var i = offset || 0;
var bth = byteToHex;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
return ([bth[buf[i++]], bth[buf[i++]],
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]],
bth[buf[i++]], bth[buf[i++]],
bth[buf[i++]], bth[buf[i++]]]).join('');
}
module.exports = bytesToUuid;
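// Illustrative sketch (hypothetical helper, never called by the bundle): the byteToHex table above
// is built with the same trick shown here, adding 0x100 before toString(16) so every byte becomes
// a zero-padded two-character hex string that bytesToUuid can join with dashes.
function exampleByteToHex(byte) {
  // exampleByteToHex(0) -> '00', exampleByteToHex(255) -> 'ff'
  return (byte + 0x100).toString(16).substr(1);
}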
/***/ }),
/***/ 5859:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
// Unique ID creation requires a high-quality random number generator. In Node.js
// this is straightforward: we use the crypto API.
var crypto = __nccwpck_require__(6113);
module.exports = function nodeRNG() {
return crypto.randomBytes(16);
};
/***/ }),
/***/ 824:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var rng = __nccwpck_require__(5859);
var bytesToUuid = __nccwpck_require__(2707);
function v4(options, buf, offset) {
var i = buf && offset || 0;
if (typeof(options) == 'string') {
buf = options === 'binary' ? new Array(16) : null;
options = null;
}
options = options || {};
var rnds = options.random || (options.rng || rng)();
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds[6] = (rnds[6] & 0x0f) | 0x40;
rnds[8] = (rnds[8] & 0x3f) | 0x80;
// Copy bytes to buffer, if provided
if (buf) {
for (var ii = 0; ii < 16; ++ii) {
buf[i + ii] = rnds[ii];
}
}
return buf || bytesToUuid(rnds);
}
module.exports = v4;
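// Illustrative sketch (hypothetical helper, never called by the bundle): v4 above forces the
// RFC 4122 version nibble into byte 6 and the variant bits into byte 8 of the 16 random bytes,
// exactly as done here, before bytesToUuid formats them.
function exampleApplyV4Bits(rnds) {
  var out = Array.prototype.slice.call(rnds);
  out[6] = (out[6] & 0x0f) | 0x40; // version 4
  out[8] = (out[8] & 0x3f) | 0x80; // RFC 4122 variant (10xxxxxx)
  return out;
}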
/***/ }),
/***/ 2940:
/***/ ((module) => {
// Returns a wrapper function that returns a wrapped callback.
// The wrapper function may do some work and return a
// (presumably different) callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module.exports = wrappy
function wrappy (fn, cb) {
if (fn && cb) return wrappy(fn)(cb)
if (typeof fn !== 'function')
throw new TypeError('need wrapper function')
Object.keys(fn).forEach(function (k) {
wrapper[k] = fn[k]
})
return wrapper
function wrapper() {
var args = new Array(arguments.length)
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i]
}
var ret = fn.apply(this, args)
var cb = args[args.length-1]
if (typeof ret === 'function' && ret !== cb) {
Object.keys(cb).forEach(function (k) {
ret[k] = cb[k]
})
}
return ret
}
}
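// Illustrative sketch (hypothetical helper, never called by the bundle): own properties set on the
// wrapper factory survive wrapping, which is the whole point of wrappy.
function exampleWrappyKeepsProperties () {
  function decorate (cb) { return function () { return cb.apply(this, arguments) } }
  decorate.sync = true
  var wrapped = wrappy(decorate)
  return wrapped.sync === true
}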
/***/ }),
/***/ 2877:
/***/ ((module) => {
// resolved with eval("require") at runtime so the optional "encoding" package is not bundled
module.exports = eval("require")("encoding");
/***/ }),
/***/ 9491:
/***/ ((module) => {
"use strict";
module.exports = require("assert");
/***/ }),
/***/ 2081:
/***/ ((module) => {
"use strict";
module.exports = require("child_process");
/***/ }),
/***/ 6113:
/***/ ((module) => {
"use strict";
module.exports = require("crypto");
/***/ }),
/***/ 2361:
/***/ ((module) => {
"use strict";
module.exports = require("events");
/***/ }),
/***/ 7147:
/***/ ((module) => {
"use strict";
module.exports = require("fs");
/***/ }),
/***/ 3685:
/***/ ((module) => {
"use strict";
module.exports = require("http");
/***/ }),
/***/ 5687:
/***/ ((module) => {
"use strict";
module.exports = require("https");
/***/ }),
/***/ 1808:
/***/ ((module) => {
"use strict";
module.exports = require("net");
/***/ }),
/***/ 2037:
/***/ ((module) => {
"use strict";
module.exports = require("os");
/***/ }),
/***/ 1017:
/***/ ((module) => {
"use strict";
module.exports = require("path");
/***/ }),
/***/ 5477:
/***/ ((module) => {
"use strict";
module.exports = require("punycode");
/***/ }),
/***/ 2781:
/***/ ((module) => {
"use strict";
module.exports = require("stream");
/***/ }),
/***/ 4404:
/***/ ((module) => {
"use strict";
module.exports = require("tls");
/***/ }),
/***/ 7310:
/***/ ((module) => {
"use strict";
module.exports = require("url");
/***/ }),
/***/ 3837:
/***/ ((module) => {
"use strict";
module.exports = require("util");
/***/ }),
/***/ 9796:
/***/ ((module) => {
"use strict";
module.exports = require("zlib");
/***/ }),
/***/ 1907:
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295
,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",
/***/ })
/******/ });
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __nccwpck_require__(moduleId) {
/******/ // Check if module is in cache
/******/ var cachedModule = __webpack_module_cache__[moduleId];
/******/ if (cachedModule !== undefined) {
/******/ return cachedModule.exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ var threw = true;
/******/ try {
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
/******/ threw = false;
/******/ } finally {
/******/ if(threw) delete __webpack_module_cache__[moduleId];
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
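/******/ 	// Illustrative sketch (hypothetical helper, never invoked): the same cache-then-execute
/******/ 	// pattern as __nccwpck_require__ above, reduced to its core (error rollback and the
/******/ 	// require argument passed to module factories are omitted here for brevity).
/******/ 	function __example_require__(registry, cache, moduleId) {
/******/ 		if (cache[moduleId] !== undefined) return cache[moduleId].exports; // cache hit
/******/ 		var module = cache[moduleId] = { exports: {} };                    // cache before executing
/******/ 		registry[moduleId].call(module.exports, module, module.exports);   // factory fills exports
/******/ 		return module.exports;
/******/ 	}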
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
/******/
/************************************************************************/
/******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var __webpack_exports__ = __nccwpck_require__(3109);
/******/ module.exports = __webpack_exports__;
/******/
/******/ })()
;