From 4902d3a118cbb2bcaa1a4f914ed144458e50971c Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 24 Jun 2024 01:16:11 -0700
Subject: [PATCH] Add backend ids

---
 packages/cache/src/cache.ts                   | 10 +-
 .../src/generated/results/api/v1/blobcache.ts | 93 +++++++++++++------
 2 files changed, 75 insertions(+), 28 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index d8a26b27..fdba186e 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -19,6 +19,7 @@ import {
   getUploadZipSpecification
 } from '@actions/artifact/lib/internal/upload/upload-zip-specification'
 import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip'
+import {getBackendIdsFromToken, BackendIds} from '@actions/artifact/lib/internal/shared/util'
 
 export class ValidationError extends Error {
   constructor(message: string) {
@@ -209,9 +210,12 @@ async function restoreCachev2(
   }
 
   try {
+    // BackendIds are retrieved from the signed JWT
+    const backendIds: BackendIds = getBackendIdsFromToken()
     const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
     const getSignedDownloadURLRequest: GetCachedBlobRequest = {
-      owner: "github",
+      workflowRunBackendId: backendIds.workflowRunBackendId,
+      workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
       keys: keys,
     }
     const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest)
@@ -362,8 +366,12 @@ async function saveCachev2(
   options?: UploadOptions,
   enableCrossOsArchive = false
 ): Promise<number> {
+  // BackendIds are retrieved from the signed JWT
+  const backendIds: BackendIds = getBackendIdsFromToken()
   const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
   const getSignedUploadURL: GetCacheBlobUploadURLRequest = {
+    workflowRunBackendId: backendIds.workflowRunBackendId,
+    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
     organization: "github",
     keys: [key],
   }
diff --git a/packages/cache/src/generated/results/api/v1/blobcache.ts b/packages/cache/src/generated/results/api/v1/blobcache.ts
index 41af2886..8e63bc63 100644
--- a/packages/cache/src/generated/results/api/v1/blobcache.ts
+++ b/packages/cache/src/generated/results/api/v1/blobcache.ts
@@ -18,15 +18,21 @@ import { Timestamp } from "../../../google/protobuf/timestamp";
  */
 export interface GetCachedBlobRequest {
     /**
-     * Owner of the blob(s) to be retrieved
+     * Workflow run backend ID
      *
-     * @generated from protobuf field: string owner = 1;
+     * @generated from protobuf field: string workflow_run_backend_id = 1;
      */
-    owner: string;
+    workflowRunBackendId: string;
+    /**
+     * Workflow job run backend ID
+     *
+     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
+     */
+    workflowJobRunBackendId: string;
     /**
      * Key(s) of te blob(s) to be retrieved
      *
-     * @generated from protobuf field: repeated string keys = 2;
+     * @generated from protobuf field: repeated string keys = 3;
      */
     keys: string[];
 }
@@ -87,15 +93,27 @@ export interface GetCachedBlobResponse_Blob {
  */
 export interface GetCacheBlobUploadURLRequest {
     /**
-     * Owner of the blob(s) to be retrieved
+     * Workflow run backend ID
      *
-     * @generated from protobuf field: string organization = 1;
+     * @generated from protobuf field: string workflow_run_backend_id = 1;
+     */
+    workflowRunBackendId: string;
+    /**
+     * Workflow job run backend ID
+     *
+     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
+     */
+    workflowJobRunBackendId: string;
+    /**
+     * / Owner of the blob(s) to be retrieved
retrieved + * + * @generated from protobuf field: string organization = 3; */ organization: string; /** * Key(s) of te blob(s) to be retrieved * - * @generated from protobuf field: repeated string keys = 2; + * @generated from protobuf field: repeated string keys = 4; */ keys: string[]; } @@ -131,12 +149,13 @@ export interface GetCacheBlobUploadURLResponse_Url { class GetCachedBlobRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.GetCachedBlobRequest", [ - { no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } ]); } create(value?: PartialMessage): GetCachedBlobRequest { - const message = { owner: "", keys: [] }; + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -147,10 +166,13 @@ class GetCachedBlobRequest$Type extends MessageType { while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string owner */ 1: - message.owner = reader.string(); + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); break; - case /* repeated string keys */ 2: + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* repeated string keys */ 3: message.keys.push(reader.string()); break; default: @@ -165,12 +187,15 @@ class GetCachedBlobRequest$Type extends MessageType { return message; } internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string owner = 1; */ - if (message.owner !== "") - writer.tag(1, WireType.LengthDelimited).string(message.owner); - /* repeated string keys = 2; */ + /* string workflow_run_backend_id = 1; */ + if (message.workflowRunBackendId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); + /* string workflow_job_run_backend_id = 2; */ + if (message.workflowJobRunBackendId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); + /* repeated string keys = 3; */ for (let i = 0; i < message.keys.length; i++) - writer.tag(2, WireType.LengthDelimited).string(message.keys[i]); + writer.tag(3, WireType.LengthDelimited).string(message.keys[i]); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
@@ -314,12 +339,14 @@ export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
 class GetCacheBlobUploadURLRequest$Type extends MessageType<GetCacheBlobUploadURLRequest> {
     constructor() {
         super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
-            { no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetCacheBlobUploadURLRequest>): GetCacheBlobUploadURLRequest {
-        const message = { organization: "", keys: [] };
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] };
         globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             reflectionMergePartial<GetCacheBlobUploadURLRequest>(this, message, value);
@@ -330,10 +357,16 @@ class GetCacheBlobUploadURLRequest$Type extends MessageType
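
Note (not part of the patch): a minimal sketch of how the two request messages look after this change, assuming the getBackendIdsFromToken helper imported above and the generated blobcache types. The buildCacheRequests wrapper and the import paths are illustrative only.

// Illustrative sketch only: shows how the new workflowRunBackendId /
// workflowJobRunBackendId fields are populated from the signed Actions JWT.
// The buildCacheRequests helper and the import paths are hypothetical.
import {getBackendIdsFromToken, BackendIds} from '@actions/artifact/lib/internal/shared/util'
import {
  GetCachedBlobRequest,
  GetCacheBlobUploadURLRequest
} from './generated/results/api/v1/blobcache'

export function buildCacheRequests(keys: string[]): {
  download: GetCachedBlobRequest
  upload: GetCacheBlobUploadURLRequest
} {
  // Both backend IDs come from the runner's signed JWT instead of the
  // previously hard-coded owner string
  const backendIds: BackendIds = getBackendIdsFromToken()

  const download: GetCachedBlobRequest = {
    workflowRunBackendId: backendIds.workflowRunBackendId,
    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
    keys
  }

  const upload: GetCacheBlobUploadURLRequest = {
    workflowRunBackendId: backendIds.workflowRunBackendId,
    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
    organization: 'github', // still hard-coded in this revision of the patch
    keys
  }

  return {download, upload}
}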