Mirror of https://gitea.com/actions/upload-artifact.git, synced 2025-03-29 10:50:44 +01:00
Prepare for new release of actions/upload-artifact with new toolkit cache version

parent 4cec3d8aa0
commit 08396203c1

6 changed files with 811 additions and 70 deletions
2  .licenses/npm/@actions/artifact.dep.yml  (generated)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/artifact"
-version: 2.2.2
+version: 2.3.2
 type: npm
 summary: Actions artifact lib
 homepage: https://github.com/actions/toolkit/tree/main/packages/artifact
36  .vscode/launch.json  (vendored, new file)

@@ -0,0 +1,36 @@
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "type": "node",
+            "request": "launch",
+            "name": "Debug Jest Tests",
+            "program": "${workspaceFolder}/node_modules/jest/bin/jest.js",
+            "args": [
+                "--runInBand",
+                "--testTimeout",
+                "10000"
+            ],
+            "cwd": "${workspaceFolder}",
+            "console": "integratedTerminal",
+            "internalConsoleOptions": "neverOpen",
+            "disableOptimisticBPs": true
+        },
+        {
+            "type": "node",
+            "request": "launch",
+            "name": "Debug Current Test File",
+            "program": "${workspaceFolder}/node_modules/jest/bin/jest.js",
+            "args": [
+                "--runInBand",
+                "--testTimeout",
+                "10000",
+                "${relativeFile}"
+            ],
+            "cwd": "${workspaceFolder}",
+            "console": "integratedTerminal",
+            "internalConsoleOptions": "neverOpen",
+            "disableOptimisticBPs": true
+        }
+    ]
+}
410  dist/merge/index.js  (vendored)
@@ -824,7 +824,7 @@ __exportStar(__nccwpck_require__(49773), exports);
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
+exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
@@ -838,6 +838,236 @@ const wrappers_1 = __nccwpck_require__(8626);
 const wrappers_2 = __nccwpck_require__(8626);
 const timestamp_1 = __nccwpck_require__(54622);
 // @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 3:
+                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* google.protobuf.Timestamp expires_at = 3; */
+        if (message.expiresAt)
+            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, signedUploadUrl: "" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_upload_url */ 2:
+                    message.signedUploadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* string signed_upload_url = 2; */
+        if (message.signedUploadUrl !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "", size: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* int64 size */ 3:
+                    message.size = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* int64 size = 3; */
+        if (message.size !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, artifactId: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 artifact_id */ 2:
+                    message.artifactId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* int64 artifact_id = 2; */
+        if (message.artifactId !== "0")
+            writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -1219,7 +1449,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
             { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
             { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
             { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
         ]);
     }
     create(value) {
@@ -1252,6 +1483,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
                 case /* google.protobuf.Timestamp created_at */ 6:
                     message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                     break;
+                case /* google.protobuf.StringValue digest */ 7:
+                    message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -1282,6 +1516,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
         /* google.protobuf.Timestamp created_at = 6; */
         if (message.createdAt)
             timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.StringValue digest = 7; */
+        if (message.digest)
+            wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -1523,7 +1760,9 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
     { name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
     { name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
     { name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
-    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
+    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
+    { name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
+    { name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
 ]);
 //# sourceMappingURL=artifact.js.map
@@ -1920,6 +2159,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
+const crypto = __importStar(__nccwpck_require__(6113));
+const stream = __importStar(__nccwpck_require__(12781));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -1956,8 +2197,7 @@ function streamExtract(url, directory) {
     let retryCount = 0;
     while (retryCount < 5) {
         try {
-            yield streamExtractExternal(url, directory);
-            return;
+            return yield streamExtractExternal(url, directory);
         }
         catch (error) {
             retryCount++;
@@ -1977,12 +2217,18 @@ function streamExtractExternal(url, directory) {
         throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
     }
     const timeout = 30 * 1000; // 30 seconds
+    let sha256Digest = undefined;
     return new Promise((resolve, reject) => {
         const timerFn = () => {
             response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
         };
         const timer = setTimeout(timerFn, timeout);
-        response.message
+        const hashStream = crypto.createHash('sha256').setEncoding('hex');
+        const passThrough = new stream.PassThrough();
+        response.message.pipe(passThrough);
+        passThrough.pipe(hashStream);
+        const extractStream = passThrough;
+        extractStream
             .on('data', () => {
             timer.refresh();
         })
@@ -1994,7 +2240,12 @@ function streamExtractExternal(url, directory) {
             .pipe(unzip_stream_1.default.Extract({ path: directory }))
             .on('close', () => {
             clearTimeout(timer);
-            resolve();
+            if (hashStream) {
+                hashStream.end();
+                sha256Digest = hashStream.read();
+                core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
+            }
+            resolve({ sha256Digest: `sha256:${sha256Digest}` });
         })
             .on('error', (error) => {
             reject(error);
@@ -2007,6 +2258,7 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const api = github.getOctokit(token);
+        let digestMismatch = false;
         core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
         const { headers, status } = yield api.rest.actions.downloadArtifact({
             owner: repositoryOwner,
@@ -2027,13 +2279,20 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(location, downloadPath);
+            const extractResponse = yield streamExtract(location, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactPublic = downloadArtifactPublic;
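The `digestMismatch` flag introduced above is returned alongside `downloadPath` rather than thrown, so validation failures are left to the caller. A minimal consumer-side sketch, assuming the caller passes the new `expectedHash` option in the `sha256:<hex>` form that `streamExtract` now resolves with; the import path and all literal values here are placeholders, not taken from the commit:

```typescript
import * as core from '@actions/core'
// Assumed import path for this sketch; in the bundle above the function
// lives in the same compiled module.
import { downloadArtifactPublic } from '@actions/artifact/lib/internal/download/download-artifact'

async function fetchAndVerify(token: string): Promise<void> {
  const { downloadPath, digestMismatch } = await downloadArtifactPublic(
    12345,          // artifactId (placeholder)
    'some-owner',   // repositoryOwner (placeholder)
    'some-repo',    // repositoryName (placeholder)
    token,
    { expectedHash: 'sha256:0123abcd' } // placeholder digest
  )
  if (digestMismatch) {
    // The download itself succeeded but the computed zip digest did not
    // match expectedHash; the caller decides whether that is fatal.
    core.warning(`Artifact at ${downloadPath} failed digest validation`)
  }
}
```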
@@ -2041,6 +2300,7 @@ function downloadArtifactInternal(artifactId, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
+        let digestMismatch = false;
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
         const listReq = {
             workflowRunBackendId,
@@ -2063,13 +2323,20 @@ function downloadArtifactInternal(artifactId, options) {
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(signedUrl, downloadPath);
+            const extractResponse = yield streamExtract(signedUrl, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactInternal = downloadArtifactInternal;
@@ -2175,13 +2442,17 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
                 name: artifact.name,
                 id: artifact.id,
                 size: artifact.size_in_bytes,
-                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+                createdAt: artifact.created_at
+                    ? new Date(artifact.created_at)
+                    : undefined,
+                digest: artifact.digest
             }
         };
     });
 }
 exports.getArtifactPublic = getArtifactPublic;
 function getArtifactInternal(artifactName) {
+    var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
@@ -2208,7 +2479,8 @@ function getArtifactInternal(artifactName) {
                 size: Number(artifact.size),
                 createdAt: artifact.createdAt
                     ? generated_1.Timestamp.toDate(artifact.createdAt)
-                    : undefined
+                    : undefined,
+                digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
             }
         };
     });
@@ -2262,7 +2534,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
     };
     const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
     let currentPageNumber = 1;
-    const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+    const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
         owner: repositoryOwner,
         repo: repositoryName,
         run_id: workflowRunId,
@@ -2281,14 +2553,18 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
             name: artifact.name,
             id: artifact.id,
             size: artifact.size_in_bytes,
-            createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+            createdAt: artifact.created_at
+                ? new Date(artifact.created_at)
+                : undefined,
+            digest: artifact.digest
         });
     }
+    // Move to the next page
+    currentPageNumber++;
     // Iterate over any remaining pages
     for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
-        currentPageNumber++;
         (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
-        const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+        const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
             owner: repositoryOwner,
             repo: repositoryName,
             run_id: workflowRunId,
@@ -2302,7 +2578,8 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
             size: artifact.size_in_bytes,
             createdAt: artifact.created_at
                 ? new Date(artifact.created_at)
-                : undefined
+                : undefined,
+            digest: artifact.digest
         });
     }
 }
@@ -2325,14 +2602,18 @@ function listArtifactsInternal(latest = false) {
         workflowJobRunBackendId
     };
     const res = yield artifactClient.ListArtifacts(req);
-    let artifacts = res.artifacts.map(artifact => ({
-        name: artifact.name,
-        id: Number(artifact.databaseId),
-        size: Number(artifact.size),
-        createdAt: artifact.createdAt
-            ? generated_1.Timestamp.toDate(artifact.createdAt)
-            : undefined
-    }));
+    let artifacts = res.artifacts.map(artifact => {
+        var _a;
+        return ({
+            name: artifact.name,
+            id: Number(artifact.databaseId),
+            size: Number(artifact.size),
+            createdAt: artifact.createdAt
+                ? generated_1.Timestamp.toDate(artifact.createdAt)
+                : undefined,
+            digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
+        });
+    });
     if (latest) {
         artifacts = filterLatest(artifacts);
     }
@@ -2444,6 +2725,7 @@ const generated_1 = __nccwpck_require__(49960);
 const config_1 = __nccwpck_require__(74610);
 const user_agent_1 = __nccwpck_require__(85164);
 const errors_1 = __nccwpck_require__(38182);
+const util_1 = __nccwpck_require__(63062);
 class ArtifactHttpClient {
     constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
         this.maxAttempts = 5;
@@ -2496,6 +2778,7 @@ class ArtifactHttpClient {
         (0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
         (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
         const body = JSON.parse(rawBody);
+        (0, util_1.maskSecretUrls)(body);
         (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
         if (this.isSuccessStatusCode(statusCode)) {
             return { response, body };
@@ -2812,10 +3095,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getBackendIdsFromToken = void 0;
+exports.maskSecretUrls = exports.maskSigUrl = exports.getBackendIdsFromToken = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const config_1 = __nccwpck_require__(74610);
 const jwt_decode_1 = __importDefault(__nccwpck_require__(84329));
+const core_1 = __nccwpck_require__(42186);
 const InvalidJwtError = new Error('Failed to get backend IDs: The provided JWT token is invalid and/or missing claims');
 // uses the JWT token claims to get the
 // workflow run and workflow job run backend ids
@@ -2864,6 +3148,74 @@ function getBackendIdsFromToken() {
     throw InvalidJwtError;
 }
 exports.getBackendIdsFromToken = getBackendIdsFromToken;
+/**
+ * Masks the `sig` parameter in a URL and sets it as a secret.
+ *
+ * @param url - The URL containing the signature parameter to mask
+ * @remarks
+ * This function attempts to parse the provided URL and identify the 'sig' query parameter.
+ * If found, it registers both the raw and URL-encoded signature values as secrets using
+ * the Actions `setSecret` API, which prevents them from being displayed in logs.
+ *
+ * The function handles errors gracefully if URL parsing fails, logging them as debug messages.
+ *
+ * @example
+ * ```typescript
+ * // Mask a signature in an Azure SAS token URL
+ * maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
+ * ```
+ */
+function maskSigUrl(url) {
+    if (!url)
+        return;
+    try {
+        const parsedUrl = new URL(url);
+        const signature = parsedUrl.searchParams.get('sig');
+        if (signature) {
+            (0, core_1.setSecret)(signature);
+            (0, core_1.setSecret)(encodeURIComponent(signature));
+        }
+    }
+    catch (error) {
+        (0, core_1.debug)(`Failed to parse URL: ${url} ${error instanceof Error ? error.message : String(error)}`);
+    }
+}
+exports.maskSigUrl = maskSigUrl;
+/**
+ * Masks sensitive information in URLs containing signature parameters.
+ * Currently supports masking 'sig' parameters in the 'signed_upload_url'
+ * and 'signed_download_url' properties of the provided object.
+ *
+ * @param body - The object should contain a signature
+ * @remarks
+ * This function extracts URLs from the object properties and calls maskSigUrl
+ * on each one to redact sensitive signature information. The function doesn't
+ * modify the original object; it only marks the signatures as secrets for
+ * logging purposes.
+ *
+ * @example
+ * ```typescript
+ * const responseBody = {
+ *   signed_upload_url: 'https://example.com?sig=abc123',
+ *   signed_download_url: 'https://example.com?sig=def456'
+ * };
+ * maskSecretUrls(responseBody);
+ * ```
+ */
+function maskSecretUrls(body) {
+    if (typeof body !== 'object' || body === null) {
+        (0, core_1.debug)('body is not an object or is null');
+        return;
+    }
+    if ('signed_upload_url' in body &&
+        typeof body.signed_upload_url === 'string') {
+        maskSigUrl(body.signed_upload_url);
+    }
+    if ('signed_url' in body && typeof body.signed_url === 'string') {
+        maskSigUrl(body.signed_url);
+    }
+}
+exports.maskSecretUrls = maskSecretUrls;
 //# sourceMappingURL=util.js.map
 
 
 /***/ }),
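These helpers are what the `(0, util_1.maskSecretUrls)(body)` call added in the `ArtifactHttpClient` hunk above invokes on every parsed Twirp response before it is debug-logged. A small sketch of the effect, with a placeholder response body; the import path is an assumption for the sketch:

```typescript
import * as core from '@actions/core'
// Assumed import path; in the bundle above these functions live in the
// shared util module of the same compiled file.
import { maskSecretUrls } from '@actions/artifact/lib/internal/shared/util'

// Placeholder Twirp response body containing a SAS-style signed URL.
const body = {
  ok: true,
  signed_upload_url: 'https://example.blob.core.windows.net/c/a.zip?sig=abc123&se=2025-01-01'
}
// Registers 'abc123' (raw and URL-encoded) via setSecret; the object itself
// is not modified, but the runner now redacts the value from log output.
maskSecretUrls(body)
core.debug(JSON.stringify(body)) // the 'sig' value appears masked in workflow logs
```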
@@ -2970,7 +3322,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();
-        core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
+        core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
         if (uploadByteCount === 0) {
             core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
         }
@@ -135483,7 +135835,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.2.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.3.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
410  dist/upload/index.js  (vendored)
@@ -824,7 +824,7 @@ __exportStar(__nccwpck_require__(49773), exports);
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
+exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
@ -838,6 +838,236 @@ const wrappers_1 = __nccwpck_require__(8626);
|
||||||
const wrappers_2 = __nccwpck_require__(8626);
|
const wrappers_2 = __nccwpck_require__(8626);
|
||||||
const timestamp_1 = __nccwpck_require__(54622);
|
const timestamp_1 = __nccwpck_require__(54622);
|
||||||
// @generated message type with reflection information, may provide speed optimized methods
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class MigrateArtifactRequest$Type extends runtime_5.MessageType {
|
||||||
|
constructor() {
|
||||||
|
super("github.actions.results.api.v1.MigrateArtifactRequest", [
|
||||||
|
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
create(value) {
|
||||||
|
const message = { workflowRunBackendId: "", name: "" };
|
||||||
|
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader, length, options, target) {
|
||||||
|
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* string workflow_run_backend_id */ 1:
|
||||||
|
message.workflowRunBackendId = reader.string();
|
||||||
|
break;
|
||||||
|
case /* string name */ 2:
|
||||||
|
message.name = reader.string();
|
||||||
|
break;
|
||||||
|
case /* google.protobuf.Timestamp expires_at */ 3:
|
||||||
|
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message, writer, options) {
|
||||||
|
/* string workflow_run_backend_id = 1; */
|
||||||
|
if (message.workflowRunBackendId !== "")
|
||||||
|
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||||
|
/* string name = 2; */
|
||||||
|
if (message.name !== "")
|
||||||
|
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||||
|
/* google.protobuf.Timestamp expires_at = 3; */
|
||||||
|
if (message.expiresAt)
|
||||||
|
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
|
||||||
|
*/
|
||||||
|
exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class MigrateArtifactResponse$Type extends runtime_5.MessageType {
|
||||||
|
constructor() {
|
||||||
|
super("github.actions.results.api.v1.MigrateArtifactResponse", [
|
||||||
|
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||||
|
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
create(value) {
|
||||||
|
const message = { ok: false, signedUploadUrl: "" };
|
||||||
|
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader, length, options, target) {
|
||||||
|
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* bool ok */ 1:
|
||||||
|
message.ok = reader.bool();
|
||||||
|
break;
|
||||||
|
case /* string signed_upload_url */ 2:
|
||||||
|
message.signedUploadUrl = reader.string();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message, writer, options) {
|
||||||
|
/* bool ok = 1; */
|
||||||
|
if (message.ok !== false)
|
||||||
|
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||||
|
/* string signed_upload_url = 2; */
|
||||||
|
if (message.signedUploadUrl !== "")
|
||||||
|
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { workflowRunBackendId: "", name: "", size: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* int64 size */ 3:
+                    message.size = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
+        /* int64 size = 3; */
+        if (message.size !== "0")
+            writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value) {
+        const message = { ok: false, artifactId: "0" };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 artifact_id */ 2:
+                    message.artifactId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
+        /* int64 artifact_id = 2; */
+        if (message.artifactId !== "0")
+            writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends runtime_5.MessageType {
     constructor() {
         super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -1219,7 +1449,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
             { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
             { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
             { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
+            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
+            { no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
         ]);
     }
     create(value) {
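The new digest field is declared as the google.protobuf.StringValue wrapper rather than a plain string, so an artifact without a digest deserializes to undefined instead of "". Consuming code therefore needs one extra unwrap; a one-line illustration (the accessor mirrors the _a.value guards added further down in this diff):

    // Illustrative unwrap of a wrapper-typed field.
    const digest = artifact.digest === undefined ? undefined : artifact.digest.value;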
@@ -1252,6 +1483,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
                 case /* google.protobuf.Timestamp created_at */ 6:
                     message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                     break;
+                case /* google.protobuf.StringValue digest */ 7:
+                    message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -1282,6 +1516,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
         /* google.protobuf.Timestamp created_at = 6; */
         if (message.createdAt)
             timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.StringValue digest = 7; */
+        if (message.digest)
+            wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -1523,7 +1760,9 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
     { name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
     { name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
     { name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
-    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
+    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
+    { name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
+    { name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
 ]);
 //# sourceMappingURL=artifact.js.map
 
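The two migration RPCs registered above complete the message types added earlier in this hunk. A minimal calling sketch, assuming `client` is any object implementing the generated ArtifactService methods (the variable and the field values are illustrative, not from this diff); per the generator's long_type_string option, int64 fields are surfaced as strings:

    // Sketch: exercising the new FinalizeMigratedArtifact RPC.
    // `client` is assumed; request fields mirror the message definition above.
    const res = await client.FinalizeMigratedArtifact({
        workflowRunBackendId: '123', // string field (ScalarType.STRING)
        name: 'my-artifact',
        size: '1024'                 // int64 carried as a string (long_type_string)
    });
    if (res.ok) {
        console.log(`Migrated artifact id: ${res.artifactId}`); // int64 as string, default "0"
    }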
@@ -1920,6 +2159,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
+const crypto = __importStar(__nccwpck_require__(6113));
+const stream = __importStar(__nccwpck_require__(12781));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -1956,8 +2197,7 @@ function streamExtract(url, directory) {
         let retryCount = 0;
         while (retryCount < 5) {
             try {
-                yield streamExtractExternal(url, directory);
-                return;
+                return yield streamExtractExternal(url, directory);
             }
             catch (error) {
                 retryCount++;
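The only behavioral change here is that streamExtract now returns the value resolved by streamExtractExternal instead of discarding it, so the digest computed during extraction reaches the download functions. A condensed sketch of the resulting loop (simplified; the real code also inspects the error and waits between attempts):

    // Simplified sketch of the retry loop after this change.
    async function streamExtractSketch(url, directory) {
        let retryCount = 0;
        while (retryCount < 5) {
            try {
                return await streamExtractExternal(url, directory); // propagates { sha256Digest }
            }
            catch (error) {
                retryCount++; // back off, then try again (elided)
            }
        }
    }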
@@ -1977,12 +2217,18 @@ function streamExtractExternal(url, directory) {
             throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
         }
         const timeout = 30 * 1000; // 30 seconds
+        let sha256Digest = undefined;
         return new Promise((resolve, reject) => {
             const timerFn = () => {
                 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
             };
             const timer = setTimeout(timerFn, timeout);
-            response.message
+            const hashStream = crypto.createHash('sha256').setEncoding('hex');
+            const passThrough = new stream.PassThrough();
+            response.message.pipe(passThrough);
+            passThrough.pipe(hashStream);
+            const extractStream = passThrough;
+            extractStream
                 .on('data', () => {
                 timer.refresh();
             })
@@ -1994,7 +2240,12 @@ function streamExtractExternal(url, directory) {
                 .pipe(unzip_stream_1.default.Extract({ path: directory }))
                 .on('close', () => {
                 clearTimeout(timer);
-                resolve();
+                if (hashStream) {
+                    hashStream.end();
+                    sha256Digest = hashStream.read();
+                    core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
+                }
+                resolve({ sha256Digest: `sha256:${sha256Digest}` });
             })
                 .on('error', (error) => {
                 reject(error);
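The rewritten download path tees the HTTP response through a PassThrough so one copy of the bytes feeds the SHA-256 hash while the same stream continues into the unzip extractor. A self-contained sketch of that pattern, assuming only Node built-ins (names illustrative):

    // Hash a stream's bytes while another consumer reads the same bytes.
    const crypto = require('crypto');
    const stream = require('stream');

    function extractWithDigest(source, sink) {
        return new Promise((resolve, reject) => {
            const hashStream = crypto.createHash('sha256').setEncoding('hex');
            const passThrough = new stream.PassThrough();
            source.pipe(passThrough);     // response bytes enter the tee
            passThrough.pipe(hashStream); // one branch accumulates the digest
            passThrough.pipe(sink)        // the other branch does the real work
                .on('close', () => {
                    hashStream.end();
                    resolve({ sha256Digest: `sha256:${hashStream.read()}` });
                })
                .on('error', reject);
        });
    }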
@@ -2007,6 +2258,7 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const api = github.getOctokit(token);
+        let digestMismatch = false;
         core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
         const { headers, status } = yield api.rest.actions.downloadArtifact({
             owner: repositoryOwner,
@@ -2027,13 +2279,20 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(location, downloadPath);
+            const extractResponse = yield streamExtract(location, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactPublic = downloadArtifactPublic;
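Digest verification is advisory: a mismatch is recorded and logged at debug level and surfaced to the caller rather than thrown. A caller-side sketch (the expectedHash value is an assumed `sha256:`-prefixed string, matching what streamExtractExternal resolves):

    // Sketch: consuming the new return shape (values illustrative).
    const { downloadPath, digestMismatch } = await downloadArtifactPublic(
        artifactId, owner, repo, token,
        { expectedHash: 'sha256:…' } // compared verbatim against the computed digest
    );
    if (digestMismatch) {
        console.warn(`Digest validation failed for the artifact extracted to ${downloadPath}`);
    }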
@@ -2041,6 +2300,7 @@ function downloadArtifactInternal(artifactId, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
+        let digestMismatch = false;
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
         const listReq = {
             workflowRunBackendId,
@@ -2063,13 +2323,20 @@ function downloadArtifactInternal(artifactId, options) {
         core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
         try {
             core.info(`Starting download of artifact to: ${downloadPath}`);
-            yield streamExtract(signedUrl, downloadPath);
+            const extractResponse = yield streamExtract(signedUrl, downloadPath);
             core.info(`Artifact download completed successfully.`);
+            if (options === null || options === void 0 ? void 0 : options.expectedHash) {
+                if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
+                    digestMismatch = true;
+                    core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
+                    core.debug(`Expected digest: ${options.expectedHash}`);
+                }
+            }
         }
         catch (error) {
             throw new Error(`Unable to download and extract artifact: ${error.message}`);
         }
-        return { downloadPath };
+        return { downloadPath, digestMismatch };
     });
 }
 exports.downloadArtifactInternal = downloadArtifactInternal;
@@ -2175,13 +2442,17 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
                 name: artifact.name,
                 id: artifact.id,
                 size: artifact.size_in_bytes,
-                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+                createdAt: artifact.created_at
+                    ? new Date(artifact.created_at)
+                    : undefined,
+                digest: artifact.digest
             }
         };
     });
 }
 exports.getArtifactPublic = getArtifactPublic;
 function getArtifactInternal(artifactName) {
+    var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
         const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
@@ -2208,7 +2479,8 @@ function getArtifactInternal(artifactName) {
                 size: Number(artifact.size),
                 createdAt: artifact.createdAt
                     ? generated_1.Timestamp.toDate(artifact.createdAt)
-                    : undefined
+                    : undefined,
+                digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
             }
         };
     });
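Both lookup paths now expose the digest on the returned artifact: getArtifactPublic passes artifact.digest through from the REST payload, while getArtifactInternal unwraps the StringValue wrapper (hence the `(_a = artifact.digest) ... _a.value` guard). Reading it, under the assumption that the enclosing return shape is `{ artifact }` as elsewhere in this client:

    // Sketch: digest is optional on both paths, so guard before use.
    const { artifact } = await getArtifactPublic(artifactName, workflowRunId, owner, repo, token);
    if (artifact.digest) {
        console.log(`artifact digest: ${artifact.digest}`);
    }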
@@ -2262,7 +2534,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
     };
     const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
     let currentPageNumber = 1;
-    const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+    const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
         owner: repositoryOwner,
         repo: repositoryName,
         run_id: workflowRunId,
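The typed github.rest.actions.listWorkflowRunArtifacts helper is swapped for a raw github.request against the same endpoint, presumably so the response can carry fields such as the new digest that the typed client's schema does not yet describe; that rationale is an inference, not stated in the diff. The raw form, with only the parameters visible in this hunk:

    // The raw Octokit request used after this change (pagination parameters
    // are elided in the diff and omitted here).
    const { data: listArtifactResponse } = await github.request(
        'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
        { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId }
    );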
@@ -2281,14 +2553,18 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
             name: artifact.name,
             id: artifact.id,
             size: artifact.size_in_bytes,
-            createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+            createdAt: artifact.created_at
+                ? new Date(artifact.created_at)
+                : undefined,
+            digest: artifact.digest
         });
     }
+    // Move to the next page
+    currentPageNumber++;
     // Iterate over any remaining pages
     for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
-        currentPageNumber++;
         (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
-        const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
+        const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
             owner: repositoryOwner,
             repo: repositoryName,
             run_id: workflowRunId,
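This hunk also fixes a pagination bug: the increment that used to run inside the loop body stacked on top of the for header's own increment, so every other page was skipped. Moving the single currentPageNumber++ in front of the loop leaves exactly one increment per iteration. In miniature:

    // Corrected pagination order (numberOfPages illustrative).
    let currentPageNumber = 1;  // page 1 was fetched before the loop
    currentPageNumber++;        // advance to page 2 exactly once
    for (; currentPageNumber < numberOfPages; currentPageNumber++) {
        // fetch page `currentPageNumber`; no page is skipped anymore
    }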
@@ -2302,7 +2578,8 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
             size: artifact.size_in_bytes,
             createdAt: artifact.created_at
                 ? new Date(artifact.created_at)
-                : undefined
+                : undefined,
+            digest: artifact.digest
         });
     }
 }
@@ -2325,14 +2602,18 @@ function listArtifactsInternal(latest = false) {
         workflowJobRunBackendId
     };
     const res = yield artifactClient.ListArtifacts(req);
-    let artifacts = res.artifacts.map(artifact => ({
-        name: artifact.name,
-        id: Number(artifact.databaseId),
-        size: Number(artifact.size),
-        createdAt: artifact.createdAt
-            ? generated_1.Timestamp.toDate(artifact.createdAt)
-            : undefined
-    }));
+    let artifacts = res.artifacts.map(artifact => {
+        var _a;
+        return ({
+            name: artifact.name,
+            id: Number(artifact.databaseId),
+            size: Number(artifact.size),
+            createdAt: artifact.createdAt
+                ? generated_1.Timestamp.toDate(artifact.createdAt)
+                : undefined,
+            digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
+        });
+    });
     if (latest) {
         artifacts = filterLatest(artifacts);
     }
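The map callback grows a block body only because the downleveled optional chain needs a temporary (`var _a`). In modern syntax the whole rewrite collapses back to one expression; an equivalent, non-downleveled form (illustrative, not part of the dist output):

    // Modern-syntax equivalent of the downleveled callback above.
    let artifacts = res.artifacts.map(artifact => ({
        name: artifact.name,
        id: Number(artifact.databaseId),
        size: Number(artifact.size),
        createdAt: artifact.createdAt
            ? generated_1.Timestamp.toDate(artifact.createdAt)
            : undefined,
        digest: artifact.digest?.value
    }));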
@@ -2444,6 +2725,7 @@ const generated_1 = __nccwpck_require__(49960);
 const config_1 = __nccwpck_require__(74610);
 const user_agent_1 = __nccwpck_require__(85164);
 const errors_1 = __nccwpck_require__(38182);
+const util_1 = __nccwpck_require__(63062);
 class ArtifactHttpClient {
     constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
         this.maxAttempts = 5;
@@ -2496,6 +2778,7 @@ class ArtifactHttpClient {
             (0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
             (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
             const body = JSON.parse(rawBody);
+            (0, util_1.maskSecretUrls)(body);
             (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
             if (this.isSuccessStatusCode(statusCode)) {
                 return { response, body };
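The placement of the masking call matters: setSecret only redacts output emitted after a value is registered, so the body must be masked between parsing and the debug log. In miniature:

    // Order of operations inside the response handler (names from the diff).
    const body = JSON.parse(rawBody);
    (0, util_1.maskSecretUrls)(body);                            // register sig values as secrets
    (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); // now logged with redaction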
@@ -2812,10 +3095,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getBackendIdsFromToken = void 0;
+exports.maskSecretUrls = exports.maskSigUrl = exports.getBackendIdsFromToken = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const config_1 = __nccwpck_require__(74610);
 const jwt_decode_1 = __importDefault(__nccwpck_require__(84329));
+const core_1 = __nccwpck_require__(42186);
 const InvalidJwtError = new Error('Failed to get backend IDs: The provided JWT token is invalid and/or missing claims');
 // uses the JWT token claims to get the
 // workflow run and workflow job run backend ids
@@ -2864,6 +3148,74 @@ function getBackendIdsFromToken() {
     throw InvalidJwtError;
 }
 exports.getBackendIdsFromToken = getBackendIdsFromToken;
+/**
+ * Masks the `sig` parameter in a URL and sets it as a secret.
+ *
+ * @param url - The URL containing the signature parameter to mask
+ * @remarks
+ * This function attempts to parse the provided URL and identify the 'sig' query parameter.
+ * If found, it registers both the raw and URL-encoded signature values as secrets using
+ * the Actions `setSecret` API, which prevents them from being displayed in logs.
+ *
+ * The function handles errors gracefully if URL parsing fails, logging them as debug messages.
+ *
+ * @example
+ * ```typescript
+ * // Mask a signature in an Azure SAS token URL
+ * maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
+ * ```
+ */
+function maskSigUrl(url) {
+    if (!url)
+        return;
+    try {
+        const parsedUrl = new URL(url);
+        const signature = parsedUrl.searchParams.get('sig');
+        if (signature) {
+            (0, core_1.setSecret)(signature);
+            (0, core_1.setSecret)(encodeURIComponent(signature));
+        }
+    }
+    catch (error) {
+        (0, core_1.debug)(`Failed to parse URL: ${url} ${error instanceof Error ? error.message : String(error)}`);
+    }
+}
+exports.maskSigUrl = maskSigUrl;
+/**
+ * Masks sensitive information in URLs containing signature parameters.
+ * Currently supports masking 'sig' parameters in the 'signed_upload_url'
+ * and 'signed_url' properties of the provided object.
+ *
+ * @param body - The response body object that may contain signed URL properties
+ * @remarks
+ * This function extracts URLs from the object properties and calls maskSigUrl
+ * on each one to redact sensitive signature information. The function doesn't
+ * modify the original object; it only marks the signatures as secrets for
+ * logging purposes.
+ *
+ * @example
+ * ```typescript
+ * const responseBody = {
+ *   signed_upload_url: 'https://example.com?sig=abc123',
+ *   signed_url: 'https://example.com?sig=def456'
+ * };
+ * maskSecretUrls(responseBody);
+ * ```
+ */
+function maskSecretUrls(body) {
+    if (typeof body !== 'object' || body === null) {
+        (0, core_1.debug)('body is not an object or is null');
+        return;
+    }
+    if ('signed_upload_url' in body &&
+        typeof body.signed_upload_url === 'string') {
+        maskSigUrl(body.signed_upload_url);
+    }
+    if ('signed_url' in body && typeof body.signed_url === 'string') {
+        maskSigUrl(body.signed_url);
+    }
+}
+exports.maskSecretUrls = maskSecretUrls;
 //# sourceMappingURL=util.js.map
 
 /***/ }),
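maskSigUrl registers the signature in both raw and percent-encoded form because SAS signatures are base64 and typically contain `+`, `/`, or `=`, which appear percent-encoded in logged URLs; masking only one spelling would leak the other. For example:

    // Both encodings of the same signature get registered (values illustrative).
    const url = 'https://example.blob.core.windows.net/c/f.txt?sig=ab%2Bcd%3D';
    const sig = new URL(url).searchParams.get('sig'); // 'ab+cd=' (searchParams decodes)
    // setSecret(sig) covers the decoded form; setSecret(encodeURIComponent(sig))
    // covers 'ab%2Bcd%3D' as it appears in the raw URL.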
@@ -2970,7 +3322,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
         sha256Hash = hashStream.read();
-        core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
+        core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
         if (uploadByteCount === 0) {
             core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
         }
@@ -135493,7 +135845,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.2.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.3.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
19
package-lock.json
generated
@@ -1,15 +1,15 @@
 {
   "name": "upload-artifact",
-  "version": "4.6.1",
+  "version": "4.6.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "upload-artifact",
-      "version": "4.6.1",
+      "version": "4.6.2",
       "license": "MIT",
       "dependencies": {
-        "@actions/artifact": "^2.2.2",
+        "@actions/artifact": "^2.3.2",
         "@actions/core": "^1.11.1",
         "@actions/github": "^6.0.0",
         "@actions/glob": "^0.5.0",
@@ -34,9 +34,10 @@
       }
     },
     "node_modules/@actions/artifact": {
-      "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.2.2.tgz",
-      "integrity": "sha512-UtS1kcINiPRkI3/hDKkO/XdrtKo89kn8s81J67QNBU6RRMWSSXrrfCCbQVThuxcdW2boOLv51NVCEKyo954A2A==",
+      "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.3.2.tgz",
+      "integrity": "sha512-uX2Mr5KEPcwnzqa0Og9wOTEKIae6C/yx9P/m8bIglzCS5nZDkcQC/zRWjjoEsyVecL6oQpBx5BuqQj/yuVm0gw==",
+      "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
@@ -7766,9 +7767,9 @@
     },
     "dependencies": {
       "@actions/artifact": {
-        "version": "2.2.2",
-        "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.2.2.tgz",
-        "integrity": "sha512-UtS1kcINiPRkI3/hDKkO/XdrtKo89kn8s81J67QNBU6RRMWSSXrrfCCbQVThuxcdW2boOLv51NVCEKyo954A2A==",
+        "version": "2.3.2",
+        "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.3.2.tgz",
+        "integrity": "sha512-uX2Mr5KEPcwnzqa0Og9wOTEKIae6C/yx9P/m8bIglzCS5nZDkcQC/zRWjjoEsyVecL6oQpBx5BuqQj/yuVm0gw==",
        "requires": {
           "@actions/core": "^1.10.0",
           "@actions/github": "^5.1.1",
package.json
@@ -1,6 +1,6 @@
 {
   "name": "upload-artifact",
-  "version": "4.6.1",
+  "version": "4.6.2",
   "description": "Upload an Actions Artifact in a workflow run",
   "main": "dist/upload/index.js",
   "scripts": {
@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
   "dependencies": {
-    "@actions/artifact": "^2.2.2",
+    "@actions/artifact": "^2.3.2",
     "@actions/core": "^1.11.1",
     "@actions/github": "^6.0.0",
     "@actions/glob": "^0.5.0",