1
0
Fork 0

Add backend ids

Link-/blobcache-spike
Bassem Dghaidi 2024-06-24 01:16:11 -07:00 committed by GitHub
parent 04d1a7ec3c
commit 4902d3a118
2 changed files with 75 additions and 28 deletions

View File

@ -19,6 +19,7 @@ import {
getUploadZipSpecification getUploadZipSpecification
} from '@actions/artifact/lib/internal/upload/upload-zip-specification' } from '@actions/artifact/lib/internal/upload/upload-zip-specification'
import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip' import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip'
import {getBackendIdsFromToken, BackendIds} from '@actions/artifact/lib/internal/shared/util'
export class ValidationError extends Error { export class ValidationError extends Error {
constructor(message: string) { constructor(message: string) {
@ -209,9 +210,12 @@ async function restoreCachev2(
} }
try { try {
// BackendIds are retrieved from the signed JWT
const backendIds: BackendIds = getBackendIdsFromToken()
const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
const getSignedDownloadURLRequest: GetCachedBlobRequest = { const getSignedDownloadURLRequest: GetCachedBlobRequest = {
owner: "github", workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
keys: keys, keys: keys,
} }
const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest) const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest)
@ -362,8 +366,12 @@ async function saveCachev2(
options?: UploadOptions, options?: UploadOptions,
enableCrossOsArchive = false enableCrossOsArchive = false
): Promise<number> { ): Promise<number> {
// BackendIds are retrieved from the signed JWT
const backendIds: BackendIds = getBackendIdsFromToken()
const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient() const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
const getSignedUploadURL: GetCacheBlobUploadURLRequest = { const getSignedUploadURL: GetCacheBlobUploadURLRequest = {
workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
organization: "github", organization: "github",
keys: [key], keys: [key],
} }

View File

@ -18,15 +18,21 @@ import { Timestamp } from "../../../google/protobuf/timestamp";
*/ */
export interface GetCachedBlobRequest { export interface GetCachedBlobRequest {
/** /**
* Owner of the blob(s) to be retrieved * Workflow run backend ID
* *
* @generated from protobuf field: string owner = 1; * @generated from protobuf field: string workflow_run_backend_id = 1;
*/ */
owner: string; workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/** /**
* Key(s) of the blob(s) to be retrieved * Key(s) of the blob(s) to be retrieved
* *
* @generated from protobuf field: repeated string keys = 2; * @generated from protobuf field: repeated string keys = 3;
*/ */
keys: string[]; keys: string[];
} }
@ -87,15 +93,27 @@ export interface GetCachedBlobResponse_Blob {
*/ */
export interface GetCacheBlobUploadURLRequest { export interface GetCacheBlobUploadURLRequest {
/** /**
* Owner of the blob(s) to be retrieved * Workflow run backend ID
* *
* @generated from protobuf field: string organization = 1; * @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* Owner of the blob(s) to be retrieved
*
* @generated from protobuf field: string organization = 3;
*/ */
organization: string; organization: string;
/** /**
* Key(s) of the blob(s) to be retrieved * Key(s) of the blob(s) to be retrieved
* *
* @generated from protobuf field: repeated string keys = 2; * @generated from protobuf field: repeated string keys = 4;
*/ */
keys: string[]; keys: string[];
} }
@ -131,12 +149,13 @@ export interface GetCacheBlobUploadURLResponse_Url {
class GetCachedBlobRequest$Type extends MessageType<GetCachedBlobRequest> { class GetCachedBlobRequest$Type extends MessageType<GetCachedBlobRequest> {
constructor() { constructor() {
super("github.actions.results.api.v1.GetCachedBlobRequest", [ super("github.actions.results.api.v1.GetCachedBlobRequest", [
{ no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
]); ]);
} }
create(value?: PartialMessage<GetCachedBlobRequest>): GetCachedBlobRequest { create(value?: PartialMessage<GetCachedBlobRequest>): GetCachedBlobRequest {
const message = { owner: "", keys: [] }; const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined) if (value !== undefined)
reflectionMergePartial<GetCachedBlobRequest>(this, message, value); reflectionMergePartial<GetCachedBlobRequest>(this, message, value);
@ -147,10 +166,13 @@ class GetCachedBlobRequest$Type extends MessageType<GetCachedBlobRequest> {
while (reader.pos < end) { while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag(); let [fieldNo, wireType] = reader.tag();
switch (fieldNo) { switch (fieldNo) {
case /* string owner */ 1: case /* string workflow_run_backend_id */ 1:
message.owner = reader.string(); message.workflowRunBackendId = reader.string();
break; break;
case /* repeated string keys */ 2: case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* repeated string keys */ 3:
message.keys.push(reader.string()); message.keys.push(reader.string());
break; break;
default: default:
@ -165,12 +187,15 @@ class GetCachedBlobRequest$Type extends MessageType<GetCachedBlobRequest> {
return message; return message;
} }
internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string owner = 1; */ /* string workflow_run_backend_id = 1; */
if (message.owner !== "") if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.owner); writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* repeated string keys = 2; */ /* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* repeated string keys = 3; */
for (let i = 0; i < message.keys.length; i++) for (let i = 0; i < message.keys.length; i++)
writer.tag(2, WireType.LengthDelimited).string(message.keys[i]); writer.tag(3, WireType.LengthDelimited).string(message.keys[i]);
let u = options.writeUnknownFields; let u = options.writeUnknownFields;
if (u !== false) if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@ -314,12 +339,14 @@ export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
class GetCacheBlobUploadURLRequest$Type extends MessageType<GetCacheBlobUploadURLRequest> { class GetCacheBlobUploadURLRequest$Type extends MessageType<GetCacheBlobUploadURLRequest> {
constructor() { constructor() {
super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [ super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
{ no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
]); ]);
} }
create(value?: PartialMessage<GetCacheBlobUploadURLRequest>): GetCacheBlobUploadURLRequest { create(value?: PartialMessage<GetCacheBlobUploadURLRequest>): GetCacheBlobUploadURLRequest {
const message = { organization: "", keys: [] }; const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined) if (value !== undefined)
reflectionMergePartial<GetCacheBlobUploadURLRequest>(this, message, value); reflectionMergePartial<GetCacheBlobUploadURLRequest>(this, message, value);
@ -330,10 +357,16 @@ class GetCacheBlobUploadURLRequest$Type extends MessageType<GetCacheBlobUploadUR
while (reader.pos < end) { while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag(); let [fieldNo, wireType] = reader.tag();
switch (fieldNo) { switch (fieldNo) {
case /* string organization */ 1: case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string organization */ 3:
message.organization = reader.string(); message.organization = reader.string();
break; break;
case /* repeated string keys */ 2: case /* repeated string keys */ 4:
message.keys.push(reader.string()); message.keys.push(reader.string());
break; break;
default: default:
@ -348,12 +381,18 @@ class GetCacheBlobUploadURLRequest$Type extends MessageType<GetCacheBlobUploadUR
return message; return message;
} }
internalBinaryWrite(message: GetCacheBlobUploadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { internalBinaryWrite(message: GetCacheBlobUploadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string organization = 1; */ /* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string organization = 3; */
if (message.organization !== "") if (message.organization !== "")
writer.tag(1, WireType.LengthDelimited).string(message.organization); writer.tag(3, WireType.LengthDelimited).string(message.organization);
/* repeated string keys = 2; */ /* repeated string keys = 4; */
for (let i = 0; i < message.keys.length; i++) for (let i = 0; i < message.keys.length; i++)
writer.tag(2, WireType.LengthDelimited).string(message.keys[i]); writer.tag(4, WireType.LengthDelimited).string(message.keys[i]);
let u = options.writeUnknownFields; let u = options.writeUnknownFields;
if (u !== false) if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);