
Merge pull request #453 from actions/robherley/more-v4-improvements

More v4 Improvements: adjustable compression level and tweak concurrency
Rob Herley authored 2023-11-21 10:58:16 -05:00; committed by GitHub
commit eeb333dcd2
Signature: no known key found in database (GPG key ID: 4AEE18F83AFDEB23)
6 changed files with 85 additions and 19 deletions

action.yml

@@ -23,6 +23,18 @@ inputs:
       Minimum 1 day.
       Maximum 90 days unless changed from the repository settings page.
+  compression-level:
+    description: >
+      The level of compression for Zlib to be applied to the artifact archive.
+      The value can range from 0 to 9:
+      - 0: No compression
+      - 1: Best speed
+      - 6: Default compression (same as GNU Gzip)
+      - 9: Best compression
+      Higher levels will result in better compression, but will take longer to complete.
+      For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
+    default: '6'
 outputs:
   artifact-id:
     description: >
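
The new input in action, as a hypothetical workflow step (the name and path values are illustrative, not from this commit):

    - uses: actions/upload-artifact@v4
      with:
        name: build-output
        path: dist/
        # Already-compressed content (zips, images, binaries) gains little
        # from zlib, so level 0 trades archive size for upload speed.
        compression-level: 0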

dist/index.js (vendored, 61 changed lines)

@@ -5032,8 +5032,8 @@ class ArtifactHttpClient {
     // JSON generated client.
     request(service, method, contentType, data) {
         return __awaiter(this, void 0, void 0, function* () {
-            const url = `${this.baseUrl}/twirp/${service}/${method}`;
-            (0, core_1.debug)(`Requesting ${url}`);
+            const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
+            (0, core_1.debug)(`Requesting: ${url}`);
             const headers = {
                 'Content-Type': contentType
             };
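
A plausible motivation for switching from template concatenation to the WHATWG URL API (my reading, not stated in the commit) is slash normalization: a base URL with a trailing slash no longer yields a malformed path. A minimal sketch with an illustrative host name:

    const base = 'https://results.example.com/' // note the trailing slash
    console.log(`${base}/twirp/Svc/Method`)
    // https://results.example.com//twirp/Svc/Method   (double slash)
    console.log(new URL('/twirp/Svc/Method', base).href)
    // https://results.example.com/twirp/Svc/Method    (normalized)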
@@ -5126,12 +5126,16 @@ exports.createArtifactTwirpClient = createArtifactTwirpClient;
 /***/ }),
 
 /***/ 95042:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
 
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
 function getUploadChunkSize() {
@@ -5151,7 +5155,7 @@ function getResultsServiceUrl() {
     if (!resultsUrl) {
         throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
     }
-    return resultsUrl;
+    return new URL(resultsUrl).origin;
 }
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {
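
The getResultsServiceUrl change has a similar flavor: URL.origin reduces whatever is in ACTIONS_RESULTS_URL to scheme, host, and port, dropping any path or query. An illustrative value (not from the PR):

    const raw = 'https://results.example.com/some/path?token=abc'
    console.log(new URL(raw).origin) // https://results.example.com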
@@ -5167,6 +5171,18 @@ function getGitHubWorkspaceDir() {
     return ghWorkspaceDir;
 }
 exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
+// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
+// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+function getConcurrency() {
+    const numCPUs = os_1.default.cpus().length;
+    if (numCPUs <= 4) {
+        return 32;
+    }
+    const concurrency = 16 * numCPUs;
+    return concurrency > 300 ? 300 : concurrency;
+}
+exports.getConcurrency = getConcurrency;
 //# sourceMappingURL=config.js.map
 
 /***/ }),
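
To make the concurrency rule concrete, here is a small TypeScript self-check that mirrors getConcurrency(); the CPU counts are examples, not values from the PR:

    // The rule: <= 4 CPUs -> 32, otherwise min(16 * CPUs, 300)
    function expectedConcurrency(numCPUs: number): number {
      if (numCPUs <= 4) return 32
      return Math.min(16 * numCPUs, 300)
    }
    console.log(expectedConcurrency(2))  // 32  (hosted 2-core runner)
    console.log(expectedConcurrency(8))  // 128
    console.log(expectedConcurrency(32)) // 300 (capped)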
@@ -5309,11 +5325,11 @@ const stream = __importStar(__nccwpck_require__(12781));
 function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
-        const maxBuffers = 5;
+        const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
-        core.debug(`Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`);
+        core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
@@ -5329,7 +5345,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         try {
             core.info('Beginning upload of artifact content to blob storage');
-            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxBuffers, options);
+            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
             core.info('Finished uploading artifact content to blob storage!');
             hashStream.end();
             sha256Hash = hashStream.read();
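
One consequence worth noting: the Azure SDK's uploadStream can allocate up to maxConcurrency buffers of bufferSize bytes at once, so peak memory scales with the new concurrency value. A back-of-the-envelope bound, assuming the chunk size is 8 MiB (an assumption; the actual value comes from getUploadChunkSize()):

    const bufferSize = 8 * 1024 * 1024 // assumed upload chunk size
    const maxConcurrency = 300         // worst case under the new cap
    console.log((bufferSize * maxConcurrency) / 2 ** 30) // 2.34375 GiB of peak buffering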
@@ -5553,7 +5569,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
                 success: false
             };
         }
-        const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification);
+        const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
         // get the IDs needed for the artifact creation
         const backendIds = (0, util_1.getBackendIdsFromToken)();
         if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
@@ -5784,12 +5800,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.createZipUploadStream = exports.ZipUploadStream = void 0;
+exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(71160));
 const core = __importStar(__nccwpck_require__(66526));
 const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(95042);
+exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
 class ZipUploadStream extends stream.Transform {
@@ -5804,14 +5821,12 @@ class ZipUploadStream extends stream.Transform {
     }
 }
 exports.ZipUploadStream = ZipUploadStream;
-function createZipUploadStream(uploadSpecification) {
+function createZipUploadStream(uploadSpecification, compressionLevel = exports.DEFAULT_COMPRESSION_LEVEL) {
     return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
         const zip = archiver.create('zip', {
-            zlib: { level: 9 } // Sets the compression level.
-            // Available options are 0-9
-            // 0 => no compression
-            // 1 => fastest with low compression
-            // 9 => highest compression ratio but the slowest
+            highWaterMark: (0, config_1.getUploadChunkSize)(),
+            zlib: { level: compressionLevel }
         });
         // register callbacks for various events during the zip lifecycle
         zip.on('error', zipErrorCallback);
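
Two things change in createZipUploadStream: the zlib level becomes caller-controlled, and archiver's highWaterMark is raised from Node's 16 KiB stream default to the upload chunk size, presumably (my reading, not stated in the commit) to avoid streaming the zip in many tiny chunks. A minimal sketch of the highWaterMark effect, using a plain pass-through Transform rather than the PR's class:

    import {Transform} from 'stream'

    const CHUNK = 8 * 1024 * 1024 // assumed upload chunk size
    const t = new Transform({
      highWaterMark: CHUNK,
      transform(chunk, _enc, cb) {
        cb(null, chunk) // pass-through, same shape as ZipUploadStream
      }
    })
    console.log(t.writableHighWaterMark) // 8388608 instead of the 16384 default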
@@ -121087,6 +121102,7 @@ var Inputs;
     Inputs["Path"] = "path";
     Inputs["IfNoFilesFound"] = "if-no-files-found";
     Inputs["RetentionDays"] = "retention-days";
+    Inputs["CompressionLevel"] = "compression-level";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var NoFileOptions;
 (function (NoFileOptions) {
@@ -121162,6 +121178,16 @@ function getInputs() {
             core.setFailed('Invalid retention-days');
         }
     }
+    const compressionLevelStr = core.getInput(constants_1.Inputs.CompressionLevel);
+    if (compressionLevelStr) {
+        inputs.compressionLevel = parseInt(compressionLevelStr);
+        if (isNaN(inputs.compressionLevel)) {
+            core.setFailed('Invalid compression-level');
+        }
+        if (inputs.compressionLevel < 0 || inputs.compressionLevel > 9) {
+            core.setFailed('Invalid compression-level. Valid values are 0-9');
+        }
+    }
     return inputs;
 }
 exports.getInputs = getInputs;
@@ -121411,6 +121437,9 @@ function run() {
         if (inputs.retentionDays) {
             options.retentionDays = inputs.retentionDays;
         }
+        if (typeof inputs.compressionLevel !== 'undefined') {
+            options.compressionLevel = inputs.compressionLevel;
+        }
         const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options);
         if (uploadResponse.success === false) {
             core.setFailed(`An error was encountered when uploading ${inputs.artifactName}.`);

src/constants.ts

@@ -3,7 +3,8 @@ export enum Inputs {
   Name = 'name',
   Path = 'path',
   IfNoFilesFound = 'if-no-files-found',
-  RetentionDays = 'retention-days'
+  RetentionDays = 'retention-days',
+  CompressionLevel = 'compression-level'
 }
 
 export enum NoFileOptions {

src/input-helper.ts

@@ -36,5 +36,17 @@ export function getInputs(): UploadInputs {
     }
   }
 
+  const compressionLevelStr = core.getInput(Inputs.CompressionLevel)
+  if (compressionLevelStr) {
+    inputs.compressionLevel = parseInt(compressionLevelStr)
+    if (isNaN(inputs.compressionLevel)) {
+      core.setFailed('Invalid compression-level')
+    }
+
+    if (inputs.compressionLevel < 0 || inputs.compressionLevel > 9) {
+      core.setFailed('Invalid compression-level. Valid values are 0-9')
+    }
+  }
+
   return inputs
 }

src/upload-artifact.ts

@@ -1,5 +1,8 @@
 import * as core from '../node_modules/@actions/core/'
-import {UploadOptions, create} from '../node_modules/@actions/artifact/lib/artifact'
+import {
+  UploadOptions,
+  create
+} from '../node_modules/@actions/artifact/lib/artifact'
 import {findFilesToUpload} from './search'
 import {getInputs} from './input-helper'
 import {NoFileOptions} from './constants'
@@ -43,6 +46,10 @@ async function run(): Promise<void> {
       options.retentionDays = inputs.retentionDays
     }
 
+    if (typeof inputs.compressionLevel !== 'undefined') {
+      options.compressionLevel = inputs.compressionLevel
+    }
+
     const uploadResponse = await artifactClient.uploadArtifact(
       inputs.artifactName,
       searchResult.filesToUpload,

src/upload-inputs.ts

@@ -20,4 +20,9 @@ export interface UploadInputs {
    * Duration after which artifact will expire in days
    */
   retentionDays: number
+
+  /**
+   * The level of compression for Zlib to be applied to the artifact archive.
+   */
+  compressionLevel?: number
 }
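
Finally, for programmatic callers of the artifact client (as opposed to the action's YAML interface), the new option rides along in UploadOptions. A hypothetical sketch using the same create() factory these sources import (the import path and file names are illustrative):

    import {create} from '@actions/artifact'

    async function main(): Promise<void> {
      const client = create()
      await client.uploadArtifact('logs', ['out.log'], '.', {
        retentionDays: 7,
        compressionLevel: 0 // e.g. skip zlib for already-compressed content
      })
    }

    main()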