Mirror of https://gitea.com/actions/upload-artifact.git (synced 2024-12-22 15:25:47 +01:00)
Add retries to all HTTP calls + resolve dependabot alerts (#160)

* Bump @actions/artifact to version 0.5.0
* Resolve dependabot alert for node-notifier
* Resolve dependabot alert for node-fetch
* Bump artifact.dep.yml
* Update http-client.dep.yml

parent e6bd6b7749
commit e448a9b857
5 changed files with 1807 additions and 1857 deletions
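The substance of the change is the retry support vendored into dist/index.js via @actions/artifact 0.5.0: every HTTP call made while uploading or downloading artifacts is now routed through a helper that retries on thrown errors and retryable status codes, sleeps with exponential backoff between attempts, and surfaces a custom message when a specific status code ends the loop. A minimal sketch of that pattern follows (illustrative only; the status codes, attempt limit, and backoff formula here are simplified stand-ins, while the real implementation is the requestUtils module visible in the dist/index.js diff below):

// Minimal sketch of the retry loop this commit vendors in (not the actual code).
// `operation` is any function returning an http-client response whose status code
// is available at response.message.statusCode; the retryable status codes and the
// backoff formula are simplified stand-ins for the vendored helpers.
async function retryExample(name, operation, maxAttempts = 5) {
    let errorMessage = '';
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let isRetryable = false;
        try {
            const response = await operation();
            const statusCode = response.message.statusCode;
            if (statusCode >= 200 && statusCode < 300) {
                return response; // success, stop retrying
            }
            // stand-in for isRetryableStatusCode() in the vendored code
            isRetryable = [429, 500, 502, 503, 504].includes(statusCode);
            errorMessage = `Artifact service responded with ${statusCode}`;
        } catch (error) {
            // network errors and timeouts are treated as retryable
            isRetryable = true;
            errorMessage = error.message;
        }
        if (!isRetryable) {
            break; // give up immediately on non-retryable failures
        }
        // exponential backoff between attempts (stand-in for
        // getExponentialRetryTimeInMilliseconds in the vendored code)
        await new Promise(resolve => setTimeout(resolve, 1000 * 2 ** attempt));
    }
    throw new Error(`${name} failed: ${errorMessage}`);
}

In the vendored code the attempt limit comes from config_variables_1.getRetryLimit() and the wait time from utils_1.getExponentialRetryTimeInMilliseconds(attempt), both visible in the diff below.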
.licenses/npm/@actions/artifact.dep.yml (2 changes, generated)
@@ -1,6 +1,6 @@
 ---
 name: "@actions/artifact"
-version: 0.4.0
+version: 0.5.0
 type: npm
 summary:
 homepage:
.licenses/npm/@actions/http-client.dep.yml (2 changes, generated)
@@ -1,6 +1,6 @@
 ---
 name: "@actions/http-client"
-version: 1.0.8
+version: 1.0.9
 type: npm
 summary: Actions Http Client
 homepage: https://github.com/actions/http-client#readme
dist/index.js (283 changes, vendored)
@@ -4075,7 +4075,8 @@ function run() {
 }
 }
 else {
-core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file(s) uploaded`);
+const s = searchResult.filesToUpload.length === 1 ? '' : 's';
+core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`);
 core.debug(`Root artifact directory is ${searchResult.rootDirectory}`);
 const artifactClient = artifact_1.create();
 const options = {
@@ -5334,6 +5335,88 @@ exports.getState = getState;
 
 /***/ }),
 
+/***/ 489:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+return new (P || (P = Promise))(function (resolve, reject) {
+function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+step((generator = generator.apply(thisArg, _arguments || [])).next());
+});
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+result["default"] = mod;
+return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = __webpack_require__(870);
+const core = __importStar(__webpack_require__(470));
+const config_variables_1 = __webpack_require__(401);
+function retry(name, operation, customErrorMessages, maxAttempts) {
+return __awaiter(this, void 0, void 0, function* () {
+let response = undefined;
+let statusCode = undefined;
+let isRetryable = false;
+let errorMessage = '';
+let customErrorInformation = undefined;
+let attempt = 1;
+while (attempt <= maxAttempts) {
+try {
+response = yield operation();
+statusCode = response.message.statusCode;
+if (utils_1.isSuccessStatusCode(statusCode)) {
+return response;
+}
+// Extra error information that we want to display if a particular response code is hit
+if (statusCode) {
+customErrorInformation = customErrorMessages.get(statusCode);
+}
+isRetryable = utils_1.isRetryableStatusCode(statusCode);
+errorMessage = `Artifact service responded with ${statusCode}`;
+}
+catch (error) {
+isRetryable = true;
+errorMessage = error.message;
+}
+if (!isRetryable) {
+core.info(`${name} - Error is not retryable`);
+if (response) {
+utils_1.displayHttpDiagnostics(response);
+}
+break;
+}
+core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt));
+attempt++;
+}
+if (response) {
+utils_1.displayHttpDiagnostics(response);
+}
+if (customErrorInformation) {
+throw Error(`${name} failed: ${customErrorInformation}`);
+}
+throw Error(`${name} failed: ${errorMessage}`);
+});
+}
+exports.retry = retry;
+function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) {
+return __awaiter(this, void 0, void 0, function* () {
+return yield retry(name, method, customErrorMessages, maxAttempts);
+});
+}
+exports.retryHttpClientRequest = retryHttpClientRequest;
+//# sourceMappingURL=requestUtils.js.map
+
+/***/ }),
+
 /***/ 532:
 /***/ (function(__unusedmodule, exports, __webpack_require__) {
 
@@ -5408,7 +5491,6 @@ exports.getDownloadSpecification = getDownloadSpecification;
 "use strict";
 
 Object.defineProperty(exports, "__esModule", { value: true });
-const url = __webpack_require__(835);
 const http = __webpack_require__(605);
 const https = __webpack_require__(211);
 const pm = __webpack_require__(950);
@@ -5457,7 +5539,7 @@ var MediaTypes;
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
 function getProxyUrl(serverUrl) {
-let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
+let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
 return proxyUrl ? proxyUrl.href : '';
 }
 exports.getProxyUrl = getProxyUrl;
@@ -5476,6 +5558,15 @@ const HttpResponseRetryCodes = [
 const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
 const ExponentialBackoffCeiling = 10;
 const ExponentialBackoffTimeSlice = 5;
+class HttpClientError extends Error {
+constructor(message, statusCode) {
+super(message);
+this.name = 'HttpClientError';
+this.statusCode = statusCode;
+Object.setPrototypeOf(this, HttpClientError.prototype);
+}
+}
+exports.HttpClientError = HttpClientError;
 class HttpClientResponse {
 constructor(message) {
 this.message = message;
@@ -5494,7 +5585,7 @@ class HttpClientResponse {
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
-let parsedUrl = url.parse(requestUrl);
+let parsedUrl = new URL(requestUrl);
 return parsedUrl.protocol === 'https:';
 }
 exports.isHttps = isHttps;
@@ -5599,7 +5690,7 @@ class HttpClient {
 if (this._disposed) {
 throw new Error('Client has already been disposed.');
 }
-let parsedUrl = url.parse(requestUrl);
+let parsedUrl = new URL(requestUrl);
 let info = this._prepareRequest(verb, parsedUrl, headers);
 // Only perform retries on reads since writes may not be idempotent.
 let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
@@ -5638,7 +5729,7 @@ class HttpClient {
 // if there's no location to redirect to, we won't
 break;
 }
-let parsedRedirectUrl = url.parse(redirectUrl);
+let parsedRedirectUrl = new URL(redirectUrl);
 if (parsedUrl.protocol == 'https:' &&
 parsedUrl.protocol != parsedRedirectUrl.protocol &&
 !this._allowRedirectDowngrade) {
@@ -5754,7 +5845,7 @@ class HttpClient {
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
 getAgent(serverUrl) {
-let parsedUrl = url.parse(serverUrl);
+let parsedUrl = new URL(serverUrl);
 return this._getAgent(parsedUrl);
 }
 _prepareRequest(method, requestUrl, headers) {
@@ -5827,7 +5918,7 @@ class HttpClient {
 maxSockets: maxSockets,
 keepAlive: this._keepAlive,
 proxy: {
-proxyAuth: proxyUrl.auth,
+proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,
 host: proxyUrl.hostname,
 port: proxyUrl.port
 }
@@ -5922,12 +6013,8 @@ class HttpClient {
 else {
 msg = 'Failed request: (' + statusCode + ')';
 }
-let err = new Error(msg);
-// attach statusCode and body obj (if available) to the error object
-err['statusCode'] = statusCode;
-if (response.result) {
-err['result'] = response.result;
-}
+let err = new HttpClientError(msg, statusCode);
+err.result = response.result;
 reject(err);
 }
 else {
@@ -6760,8 +6847,10 @@ const util_1 = __webpack_require__(669);
 const url_1 = __webpack_require__(835);
 const perf_hooks_1 = __webpack_require__(630);
 const status_reporter_1 = __webpack_require__(176);
+const http_client_1 = __webpack_require__(539);
 const http_manager_1 = __webpack_require__(452);
 const upload_gzip_1 = __webpack_require__(647);
+const requestUtils_1 = __webpack_require__(489);
 const stat = util_1.promisify(fs.stat);
 class UploadHttpClient {
 constructor() {
@@ -6789,20 +6878,22 @@ class UploadHttpClient {
 // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
 const client = this.uploadHttpManager.getClient(0);
 const headers = utils_1.getUploadHeaders('application/json', false);
-const rawResponse = yield client.post(artifactUrl, data, headers);
-const body = yield rawResponse.readBody();
-if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
-return JSON.parse(body);
-}
-else if (utils_1.isForbiddenStatusCode(rawResponse.message.statusCode)) {
-// if a 403 is returned when trying to create a file container, the customer has exceeded
-// their storage quota so no new artifact containers can be created
-throw new Error(`Artifact storage quota has been hit. Unable to upload any new artifacts`);
-}
-else {
-utils_1.displayHttpDiagnostics(rawResponse);
-throw new Error(`Unable to create a container for the artifact ${artifactName} at ${artifactUrl}`);
-}
+// Extra information to display when a particular HTTP code is returned
+// If a 403 is returned when trying to create a file container, the customer has exceeded
+// their storage quota so no new artifact containers can be created
+const customErrorMessages = new Map([
+[
+http_client_1.HttpCodes.Forbidden,
+'Artifact storage quota has been hit. Unable to upload any new artifacts'
+],
+[
+http_client_1.HttpCodes.BadRequest,
+`The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}`
+]
+]);
+const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages);
+const body = yield response.readBody();
+return JSON.parse(body);
 });
 }
 /**
@@ -7026,12 +7117,12 @@ class UploadHttpClient {
 this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
 if (retryAfterValue) {
 core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`);
-yield new Promise(resolve => setTimeout(resolve, retryAfterValue));
+yield utils_1.sleep(retryAfterValue);
 }
 else {
 const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount);
 core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`);
-yield new Promise(resolve => setTimeout(resolve, backoffTime));
+yield utils_1.sleep(backoffTime);
 }
 core.info(`Finished backoff for retry #${retryCount}, continuing with upload`);
 return;
@@ -7083,7 +7174,6 @@ class UploadHttpClient {
 */
 patchArtifactSize(size, artifactName) {
 return __awaiter(this, void 0, void 0, function* () {
-const headers = utils_1.getUploadHeaders('application/json', false);
 const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
 resourceUrl.searchParams.append('artifactName', artifactName);
 const parameters = { Size: size };
@@ -7091,19 +7181,18 @@ class UploadHttpClient {
 core.debug(`URL is ${resourceUrl.toString()}`);
 // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
 const client = this.uploadHttpManager.getClient(0);
-const response = yield client.patch(resourceUrl.toString(), data, headers);
-const body = yield response.readBody();
-if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
-core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
-}
-else if (response.message.statusCode === 404) {
-throw new Error(`An Artifact with the name ${artifactName} was not found`);
-}
-else {
-utils_1.displayHttpDiagnostics(response);
-core.info(body);
-throw new Error(`Unable to finish uploading artifact ${artifactName} to ${resourceUrl}`);
-}
+const headers = utils_1.getUploadHeaders('application/json', false);
+// Extra information to display when a particular HTTP code is returned
+const customErrorMessages = new Map([
+[
+http_client_1.HttpCodes.NotFound,
+`An Artifact with the name ${artifactName} was not found`
+]
+]);
+// TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this
+const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages);
+yield response.readBody();
+core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
 });
 }
 }
@@ -7523,6 +7612,7 @@ const status_reporter_1 = __webpack_require__(176);
 const perf_hooks_1 = __webpack_require__(630);
 const http_manager_1 = __webpack_require__(452);
 const config_variables_1 = __webpack_require__(401);
+const requestUtils_1 = __webpack_require__(489);
 class DownloadHttpClient {
 constructor() {
 this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download');
@@ -7538,13 +7628,9 @@ class DownloadHttpClient {
 // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
 const client = this.downloadHttpManager.getClient(0);
 const headers = utils_1.getDownloadHeaders('application/json');
-const response = yield client.get(artifactUrl, headers);
+const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); }));
 const body = yield response.readBody();
-if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
-return JSON.parse(body);
-}
-utils_1.displayHttpDiagnostics(response);
-throw new Error(`Unable to list artifacts for the run. Resource Url ${artifactUrl}`);
+return JSON.parse(body);
 });
 }
 /**
@@ -7560,13 +7646,9 @@ class DownloadHttpClient {
 // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
 const client = this.downloadHttpManager.getClient(0);
 const headers = utils_1.getDownloadHeaders('application/json');
-const response = yield client.get(resourceUrl.toString(), headers);
+const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); }));
 const body = yield response.readBody();
-if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
-return JSON.parse(body);
-}
-utils_1.displayHttpDiagnostics(response);
-throw new Error(`Unable to get ContainersItems from ${resourceUrl}`);
+return JSON.parse(body);
 });
 }
 /**
@@ -7616,7 +7698,7 @@ class DownloadHttpClient {
 return __awaiter(this, void 0, void 0, function* () {
 let retryCount = 0;
 const retryLimit = config_variables_1.getRetryLimit();
-const destinationStream = fs.createWriteStream(downloadPath);
+let destinationStream = fs.createWriteStream(downloadPath);
 const headers = utils_1.getDownloadHeaders('application/json', true, true);
 // a single GET request is used to download a file
 const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
@@ -7641,22 +7723,40 @@ class DownloadHttpClient {
 if (retryAfterValue) {
 // Back off by waiting the specified time denoted by the retry-after header
 core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`);
-yield new Promise(resolve => setTimeout(resolve, retryAfterValue));
+yield utils_1.sleep(retryAfterValue);
 }
 else {
 // Back off using an exponential value that depends on the retry count
 const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount);
 core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`);
-yield new Promise(resolve => setTimeout(resolve, backoffTime));
+yield utils_1.sleep(backoffTime);
 }
 core.info(`Finished backoff for retry #${retryCount}, continuing with download`);
 }
 });
+const isAllBytesReceived = (expected, received) => {
+// be lenient, if any input is missing, assume success, i.e. not truncated
+if (!expected ||
+!received ||
+process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) {
+core.info('Skipping download validation.');
+return true;
+}
+return parseInt(expected) === received;
+};
+const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () {
+destinationStream.close();
+yield utils_1.rmFile(fileDownloadPath);
+destinationStream = fs.createWriteStream(fileDownloadPath);
+});
 // keep trying to download a file until a retry limit has been reached
 while (retryCount <= retryLimit) {
 let response;
 try {
 response = yield makeDownloadRequest();
+if (core.isDebug()) {
+utils_1.displayHttpDiagnostics(response);
+}
 }
 catch (error) {
 // if an error is caught, it is usually indicative of a timeout so retry the download
@@ -7667,14 +7767,30 @@ class DownloadHttpClient {
 yield backOff();
 continue;
 }
+let forceRetry = false;
 if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
 // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string
 // which can cause some gzip encoded data to be lost
 // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
-return this.pipeResponseToFile(response, destinationStream, isGzip(response.message.headers));
+try {
+const isGzipped = isGzip(response.message.headers);
+yield this.pipeResponseToFile(response, destinationStream, isGzipped);
+if (isGzipped ||
+isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) {
+return;
+}
+else {
+forceRetry = true;
+}
+}
+catch (error) {
+// retry on error, most likely streams were corrupted
+forceRetry = true;
+}
 }
-else if (utils_1.isRetryableStatusCode(response.message.statusCode)) {
+if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) {
 core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`);
+resetDestinationStream(downloadPath);
 // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff
 utils_1.isThrottledStatusCode(response.message.statusCode)
 ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers))
@@ -7700,24 +7816,40 @@ class DownloadHttpClient {
 if (isGzip) {
 const gunzip = zlib.createGunzip();
 response.message
+.on('error', error => {
+core.error(`An error occurred while attempting to read the response stream`);
+gunzip.close();
+destinationStream.close();
+reject(error);
+})
 .pipe(gunzip)
+.on('error', error => {
+core.error(`An error occurred while attempting to decompress the response stream`);
+destinationStream.close();
+reject(error);
+})
 .pipe(destinationStream)
 .on('close', () => {
 resolve();
 })
 .on('error', error => {
-core.error(`An error has been encountered while decompressing and writing a downloaded file to ${destinationStream.path}`);
+core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
 reject(error);
 });
 }
 else {
 response.message
+.on('error', error => {
+core.error(`An error occurred while attempting to read the response stream`);
+destinationStream.close();
+reject(error);
+})
 .pipe(destinationStream)
 .on('close', () => {
 resolve();
 })
 .on('error', error => {
-core.error(`An error has been encountered while writing a downloaded file to ${destinationStream.path}`);
+core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
 reject(error);
 });
 }
@@ -8250,6 +8382,20 @@ function createEmptyFilesForArtifact(emptyFilesToCreate) {
 });
 }
 exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact;
+function getFileSize(filePath) {
+return __awaiter(this, void 0, void 0, function* () {
+const stats = yield fs_1.promises.stat(filePath);
+core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`);
+return stats.size;
+});
+}
+exports.getFileSize = getFileSize;
+function rmFile(filePath) {
+return __awaiter(this, void 0, void 0, function* () {
+yield fs_1.promises.unlink(filePath);
+});
+}
+exports.rmFile = rmFile;
 function getProperRetention(retentionInput, retentionSetting) {
 if (retentionInput < 0) {
 throw new Error('Invalid retention, minimum value is 1.');
@@ -8265,6 +8411,12 @@ function getProperRetention(retentionInput, retentionSetting) {
 return retention;
 }
 exports.getProperRetention = getProperRetention;
+function sleep(milliseconds) {
+return __awaiter(this, void 0, void 0, function* () {
+return new Promise(resolve => setTimeout(resolve, milliseconds));
+});
+}
+exports.sleep = sleep;
 //# sourceMappingURL=utils.js.map
 
 /***/ }),
@@ -8583,12 +8735,11 @@ exports.Pattern = Pattern;
 /***/ }),
 
 /***/ 950:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/***/ (function(__unusedmodule, exports) {
 
 "use strict";
 
 Object.defineProperty(exports, "__esModule", { value: true });
-const url = __webpack_require__(835);
 function getProxyUrl(reqUrl) {
 let usingSsl = reqUrl.protocol === 'https:';
 let proxyUrl;
@@ -8603,7 +8754,7 @@ function getProxyUrl(reqUrl) {
 proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
 }
 if (proxyVar) {
-proxyUrl = url.parse(proxyVar);
+proxyUrl = new URL(proxyVar);
 }
 return proxyUrl;
 }
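For orientation, this is roughly the call pattern the wrapped requests above follow (an illustrative sketch, not part of the commit: retryHttpClientRequest is internal to @actions/artifact, and client, artifactUrl, data, and headers are assumed to come from the surrounding upload code, as in the Create Artifact Container change):

// Illustrative sketch of how a caller wires a request through the new retry helper.
// Assumes `retryHttpClientRequest` (internal to @actions/artifact) is in scope and
// that client/artifactUrl/data/headers come from the surrounding upload code.
async function createContainerWithRetry(client, artifactUrl, data, headers) {
    // extra messages shown when a specific status code ends the retry loop
    const customErrorMessages = new Map([
        [403, 'Artifact storage quota has been hit. Unable to upload any new artifacts'],
        [400, `The artifact name is not valid. Request URL ${artifactUrl}`]
    ]);
    const response = await retryHttpClientRequest(
        'Create Artifact Container',
        async () => client.post(artifactUrl, data, headers),
        customErrorMessages
    );
    return JSON.parse(await response.readBody());
}

The download-side calls ('List Artifacts', 'Get Container Items') follow the same shape without custom error messages.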
package-lock.json (3371 changes, generated)

File diff suppressed because it is too large.
package.json

@@ -29,7 +29,7 @@
 },
 "homepage": "https://github.com/actions/upload-artifact#readme",
 "dependencies": {
-"@actions/artifact": "^0.4.0",
+"@actions/artifact": "^0.5.0",
 "@actions/core": "^1.2.6",
 "@actions/glob": "^0.1.0",
 "@actions/io": "^1.0.2"
@@ -41,10 +41,10 @@
 "@zeit/ncc": "^0.22.1",
 "concurrently": "^5.1.0",
 "eslint": "^7.4.0",
-"eslint-plugin-github": "^3.4.1",
+"eslint-plugin-github": "^4.1.1",
 "eslint-plugin-jest": "^23.8.2",
 "glob": "^7.1.6",
-"jest": "^26.1.0",
+"jest": "^26.6.3",
 "jest-circus": "^26.1.0",
 "prettier": "^2.0.4",
 "ts-jest": "^25.3.1",