Update to @actions/artifact 0.3.1
parent 85bdb8f4a9
commit f42ee54997
3 changed files with 53 additions and 68 deletions

dist/index.js (vendored, 113 changes)

@@ -2493,7 +2493,7 @@ class StatusReporter {
             for (const value of Array.from(this.largeFiles.values())) {
                 core_1.info(value);
             }
-            // delete all entires in the map after displaying the information so it will not be displayed again unless explicitly added
+            // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added
             this.largeFiles.clear();
         }, 1000);
     }
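
For context on the map being cleared: updateLargeFileStatus (called from the upload hunk below) records a progress message per large file, and this interval prints the recorded messages once a second and then drains the map so a message is only repeated if it is explicitly re-added. A stripped-down sketch of that pattern follows; the names mirror the hunk, but this is an illustration, not the vendored StatusReporter class.

const core = require('@actions/core');

class LargeFileReporterSketch {
    constructor() {
        this.largeFiles = new Map();
        // print any recorded large-file progress once a second, then clear it
        this.largeFileStatus = setInterval(() => {
            for (const value of Array.from(this.largeFiles.values())) {
                core.info(value);
            }
            // delete all entries so nothing is displayed again unless explicitly re-added
            this.largeFiles.clear();
        }, 1000);
    }

    // the message format here is illustrative; only the Map-and-interval pattern is taken from the diff
    updateLargeFileStatus(fileName, offset, totalSize) {
        this.largeFiles.set(fileName, `Uploading ${fileName}: ${offset} of ${totalSize} bytes sent`);
    }

    stop() {
        clearInterval(this.largeFileStatus);
    }
}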

@@ -6658,61 +6658,55 @@ class UploadHttpClient {
             }
             else {
                 // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
-                // npm tmp-promise package and this file gets used during compression for the GZip file that gets created
-                return tmp
-                    .file()
-                    .then((tmpFile) => __awaiter(this, void 0, void 0, function* () {
-                    // create a GZip file of the original file being uploaded, the original file should not be modified in any way
-                    uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tmpFile.path);
-                    let uploadFilePath = tmpFile.path;
-                    // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
-                    if (totalFileSize < uploadFileSize) {
-                        uploadFileSize = totalFileSize;
-                        uploadFilePath = parameters.file;
-                        isGzip = false;
-                        tmpFile.cleanup();
-                    }
-                    let abortFileUpload = false;
-                    // upload only a single chunk at a time
-                    while (offset < uploadFileSize) {
-                        const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
-                        // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
-                        if (uploadFileSize > 104857600) {
-                            this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize);
-                        }
-                        const start = offset;
-                        const end = offset + chunkSize - 1;
-                        offset += parameters.maxChunkSize;
-                        if (abortFileUpload) {
-                            // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
-                            failedChunkSizes += chunkSize;
-                            continue;
-                        }
-                        const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
-                            start,
-                            end,
-                            autoClose: false
-                        }), start, end, uploadFileSize, isGzip, totalFileSize);
-                        if (!result) {
-                            // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
-                            // successfully uploaded so the server may report a different size for what was uploaded
-                            isUploadSuccessful = false;
-                            failedChunkSizes += chunkSize;
-                            core.warning(`Aborting upload for ${parameters.file} due to failure`);
-                            abortFileUpload = true;
-                        }
-                    }
-                }))
-                    .then(() => __awaiter(this, void 0, void 0, function* () {
-                    // only after the file upload is complete and the temporary file is deleted, return the UploadResult
-                    return new Promise(resolve => {
-                        resolve({
-                            isSuccess: isUploadSuccessful,
-                            successfulUploadSize: uploadFileSize - failedChunkSizes,
-                            totalSize: totalFileSize
-                        });
-                    });
-                }));
+                // npm tmp-promise package and this file gets used to create a GZipped file
+                const tempFile = yield tmp.file();
+                // create a GZip file of the original file being uploaded, the original file should not be modified in any way
+                uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path);
+                let uploadFilePath = tempFile.path;
+                // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
+                if (totalFileSize < uploadFileSize) {
+                    uploadFileSize = totalFileSize;
+                    uploadFilePath = parameters.file;
+                    isGzip = false;
+                }
+                let abortFileUpload = false;
+                // upload only a single chunk at a time
+                while (offset < uploadFileSize) {
+                    const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
+                    // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
+                    if (uploadFileSize > 104857600) {
+                        this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize);
+                    }
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += parameters.maxChunkSize;
+                    if (abortFileUpload) {
+                        // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
+                        failedChunkSizes += chunkSize;
+                        continue;
+                    }
+                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end, uploadFileSize, isGzip, totalFileSize);
+                    if (!result) {
+                        // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
+                        // successfully uploaded so the server may report a different size for what was uploaded
+                        isUploadSuccessful = false;
+                        failedChunkSizes += chunkSize;
+                        core.warning(`Aborting upload for ${parameters.file} due to failure`);
+                        abortFileUpload = true;
+                    }
+                }
+                // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
+                // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
+                yield tempFile.cleanup();
+                return {
+                    isSuccess: isUploadSuccessful,
+                    successfulUploadSize: uploadFileSize - failedChunkSizes,
+                    totalSize: totalFileSize
+                };
             }
         });
     }
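
Two things happen in this hunk. First, a refactor: the tmp-promise .then() chain (and the extra Promise wrapping the UploadResult) is replaced by a plain yield on tmp.file(), so the chunk loop and the returned result sit at one nesting level. Second, a small behavior fix: tempFile.cleanup() is now called unconditionally after the chunk loop, whereas previously the temp GZip file was only cleaned up when compression did not help. A minimal sketch of the adopted pattern, assuming a hypothetical uploadChunks helper; only the tmp-promise calls (tmp.file(), .path, .cleanup()) are the real library API.

const tmp = require('tmp-promise');

async function uploadWithTempGzip(uploadChunks) {
    // 0.3.1 style: obtain the temporary file with a plain await instead of a .then() chain
    const tempFile = await tmp.file();
    // ... write the GZipped copy to tempFile.path and upload it chunk by chunk ...
    const result = await uploadChunks(tempFile.path);
    // delete the temp file explicitly; if this is skipped, tmp-promise removes it when the process exits
    await tempFile.cleanup();
    return result;
}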

@@ -7913,16 +7907,7 @@ exports.displayHttpDiagnostics = displayHttpDiagnostics;
  *
  * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
  */
-const invalidArtifactFilePathCharacters = [
-    '"',
-    ':',
-    '<',
-    '>',
-    '|',
-    '*',
-    '?',
-    ' '
-];
+const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?'];
 const invalidArtifactNameCharacters = [
     ...invalidArtifactFilePathCharacters,
     '\\',
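
Besides collapsing the array onto one line, this hunk drops ' ' (space) from invalidArtifactFilePathCharacters, so file paths containing spaces are no longer rejected; artifact names are still checked against the larger invalidArtifactNameCharacters list. A rough sketch of how such a list is typically applied when validating a path; the real validation lives inside @actions/artifact, so checkArtifactFilePath here is illustrative only.

const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?'];

function checkArtifactFilePath(filePath) {
    for (const invalidChar of invalidArtifactFilePathCharacters) {
        if (filePath.includes(invalidChar)) {
            throw new Error(`Artifact path is not valid: ${filePath}. Contains character: "${invalidChar}"`);
        }
    }
}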

package-lock.json (generated, 6 changes)

@@ -5,9 +5,9 @@
   "requires": true,
   "dependencies": {
     "@actions/artifact": {
-      "version": "0.3.0",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.3.0.tgz",
-      "integrity": "sha512-t35sO6q2nVEb0Y/4GOKK7XlBo1qqnKlapMry46OFjFelsGmAgHMGMBq16s4Q0XCJrvToAhiRUcf3wZ8xyByDuw==",
+      "version": "0.3.1",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.3.1.tgz",
+      "integrity": "sha512-czRvOioOpuvmF/qDevfVVpZeBt7pjYlrnmM1+tRuCpKJxjWFYgi5MIW7TfscyupXPvtJz9jIxMjvxy9Eug1QEA==",
       "dev": true,
       "requires": {
         "@actions/core": "^1.2.1",

package.json (2 changes)

@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
   "devDependencies": {
-    "@actions/artifact": "^0.3.0",
+    "@actions/artifact": "^0.3.1",
     "@actions/core": "^1.2.3",
     "@actions/glob": "^0.1.0",
     "@actions/io": "^1.0.2",
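
Both manifest changes are the routine devDependency bump that accompanies the vendored dist update. The commit does not record the exact command, but a lockfile and package.json change like this is typically produced by something along the lines of npm install --save-dev @actions/artifact@0.3.1, followed by rebuilding dist/index.js.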