
Update cache service APIs & cleanup

pull/1857/head
Bassem Dghaidi 2024-11-20 13:53:47 -08:00 committed by GitHub
parent 8616c313a2
commit a1e6ef3759
9 changed files with 302 additions and 308 deletions
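
Taken together, these changes replace the two JWT-derived backend-ID strings on every cache request with a single optional CacheMetadata message (repository id plus scoped permissions) and drop the jwt-decode dependency that the old client-side extraction required. A minimal sketch of the new request shape, with illustrative values and import paths relative to packages/cache/src:

import {CreateCacheEntryRequest} from './generated/results/api/v1/cache'

// metadata is optional, so a client can send just the cache coordinates;
// identity now travels in the request's bearer token rather than the body.
const request: CreateCacheEntryRequest = {
  key: 'node-cache-linux-x64',
  version: 'abc123'
}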

packages/cache/package-lock.json (generated, vendored)

@@ -18,7 +18,6 @@
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"@protobuf-ts/plugin": "^2.9.4",
"jwt-decode": "^3.1.2",
"semver": "^6.3.1",
"twirp-ts": "^2.5.0"
},
@@ -525,12 +524,6 @@
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"license": "ISC"
},
"node_modules/jwt-decode": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz",
"integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==",
"license": "MIT"
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@@ -1195,11 +1188,6 @@
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"jwt-decode": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz",
"integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A=="
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",

packages/cache/package.json

@@ -47,7 +47,6 @@
"@azure/storage-blob": "^12.13.0",
"@protobuf-ts/plugin": "^2.9.4",
"semver": "^6.3.1",
"jwt-decode": "^3.1.2",
"twirp-ts": "^2.5.0"
},
"devDependencies": {

packages/cache/src/cache.ts

@@ -236,12 +236,9 @@ async function restoreCacheV2(
let archivePath = ''
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
const backendIds: utils.BackendIds = utils.getBackendIdsFromToken()
const compressionMethod = await utils.getCompressionMethod()
const request: GetCacheEntryDownloadURLRequest = {
workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(
@@ -448,8 +445,6 @@ async function saveCacheV2(
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
// BackendIds are retrieved form the signed JWT
const backendIds: utils.BackendIds = utils.getBackendIdsFromToken()
const compressionMethod = await utils.getCompressionMethod()
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
let cacheId = -1
@@ -497,8 +492,6 @@ async function saveCacheV2(
enableCrossOsArchive
)
const request: CreateCacheEntryRequest = {
workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
key,
version
}
@@ -514,8 +507,6 @@ async function saveCacheV2(
await UploadCacheFile(response.signedUploadUrl, archivePath)
const finalizeRequest: FinalizeCacheEntryUploadRequest = {
workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
key,
version,
sizeBytes: `${archiveFileSize}`
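
With getBackendIdsFromToken gone, the v2 paths build their requests from the cache coordinates alone. A condensed sketch of the surviving restore flow, reusing the variable names from the hunks above (the getCacheVersion argument list is completed here as an assumption, since the hunk truncates it):

const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
const compressionMethod = await utils.getCompressionMethod()
const request: GetCacheEntryDownloadURLRequest = {
  key: primaryKey,
  restoreKeys,
  version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)
}
const response = await twirpClient.GetCacheEntryDownloadURL(request)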

packages/cache/src/generated/results/api/v1/cache.ts

@@ -13,32 +13,27 @@ import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Timestamp } from "../../../google/protobuf/timestamp";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export interface CreateCacheEntryRequest {
/**
* Workflow run backend ID
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 3;
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
* @generated from protobuf field: string version = 3;
*/
version: string;
}
@@ -62,33 +57,27 @@ export interface CreateCacheEntryResponse {
*/
export interface FinalizeCacheEntryUploadRequest {
/**
* Workflow run backend ID
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 3;
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Size of the cache archive in Bytes
*
* @generated from protobuf field: int64 size_bytes = 4;
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 5;
* @generated from protobuf field: string version = 4;
*/
version: string;
}
@@ -112,33 +101,27 @@ export interface FinalizeCacheEntryUploadResponse {
*/
export interface GetCacheEntryDownloadURLRequest {
/**
* Workflow run backend ID
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 3;
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 4;
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 5;
* @generated from protobuf field: string version = 4;
*/
version: string;
}
@@ -162,21 +145,15 @@ export interface GetCacheEntryDownloadURLResponse {
*/
export interface DeleteCacheEntryRequest {
/**
* Workflow run backend ID
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 3;
* @generated from protobuf field: string key = 2;
*/
key: string;
}
@@ -200,27 +177,21 @@ export interface DeleteCacheEntryResponse {
*/
export interface ListCacheEntriesRequest {
/**
* Workflow run backend ID
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 3;
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 4;
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
}
@@ -291,33 +262,27 @@ export interface ListCacheEntriesResponse_CacheEntry {
*/
export interface LookupCacheEntryRequest {
/**
* Workflow run backend ID
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
workflowRunBackendId: string;
/**
* Workflow job run backend ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 3;
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 4;
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 5;
* @generated from protobuf field: string version = 4;
*/
version: string;
}
@@ -391,14 +356,13 @@ export interface LookupCacheEntryResponse_CacheEntry {
class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", version: "" };
const message = { key: "", version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CreateCacheEntryRequest>(this, message, value);
@@ -409,16 +373,13 @@ class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest>
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string key */ 3:
case /* string key */ 2:
message.key = reader.string();
break;
case /* string version */ 4:
case /* string version */ 3:
message.version = reader.string();
break;
default:
@@ -433,18 +394,15 @@ class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest>
return message;
}
internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string key = 3; */
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(3, WireType.LengthDelimited).string(message.key);
/* string version = 4; */
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* string version = 3; */
if (message.version !== "")
writer.tag(4, WireType.LengthDelimited).string(message.version);
writer.tag(3, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -513,15 +471,14 @@ export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", sizeBytes: "0", version: "" };
const message = { key: "", sizeBytes: "0", version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FinalizeCacheEntryUploadRequest>(this, message, value);
@@ -532,19 +489,16 @@ class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntr
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string key */ 3:
case /* string key */ 2:
message.key = reader.string();
break;
case /* int64 size_bytes */ 4:
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string version */ 5:
case /* string version */ 4:
message.version = reader.string();
break;
default:
@@ -559,21 +513,18 @@ class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntr
return message;
}
internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string key = 3; */
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(3, WireType.LengthDelimited).string(message.key);
/* int64 size_bytes = 4; */
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(4, WireType.Varint).int64(message.sizeBytes);
/* string version = 5; */
writer.tag(3, WireType.Varint).int64(message.sizeBytes);
/* string version = 4; */
if (message.version !== "")
writer.tag(5, WireType.LengthDelimited).string(message.version);
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -642,15 +593,14 @@ export const FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResp
class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" };
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<GetCacheEntryDownloadURLRequest>(this, message, value);
@@ -661,19 +611,16 @@ class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDown
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string key */ 3:
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 4:
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 5:
case /* string version */ 4:
message.version = reader.string();
break;
default:
@@ -688,21 +635,18 @@ class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDown
return message;
}
internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string key = 3; */
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(3, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 4; */
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 5; */
writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(5, WireType.LengthDelimited).string(message.version);
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -771,13 +715,12 @@ export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResp
class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
constructor() {
super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "" };
const message = { key: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<DeleteCacheEntryRequest>(this, message, value);
@@ -788,13 +731,10 @@ class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest>
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string key */ 3:
case /* string key */ 2:
message.key = reader.string();
break;
default:
@@ -809,15 +749,12 @@ class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest>
return message;
}
internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string key = 3; */
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(3, WireType.LengthDelimited).string(message.key);
writer.tag(2, WireType.LengthDelimited).string(message.key);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -886,14 +823,13 @@ export const DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type();
class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest> {
constructor() {
super("github.actions.results.api.v1.ListCacheEntriesRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<ListCacheEntriesRequest>): ListCacheEntriesRequest {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [] };
const message = { key: "", restoreKeys: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<ListCacheEntriesRequest>(this, message, value);
@@ -904,16 +840,13 @@ class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest>
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string key */ 3:
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 4:
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
default:
@@ -928,18 +861,15 @@ class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest>
return message;
}
internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string key = 3; */
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(3, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 4; */
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]);
writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -1097,15 +1027,14 @@ export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_
class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<LookupCacheEntryRequest>): LookupCacheEntryRequest {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" };
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<LookupCacheEntryRequest>(this, message, value);
@@ -1116,19 +1045,16 @@ class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest>
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string key */ 3:
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 4:
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 5:
case /* string version */ 4:
message.version = reader.string();
break;
default:
@@ -1143,21 +1069,18 @@ class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest>
return message;
}
internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string key = 3; */
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(3, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 4; */
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 5; */
writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(5, WireType.LengthDelimited).string(message.version);
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
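
Because these classes are regenerated rather than hand-edited, the renumbering (metadata takes field 1 and every later field shifts down by one) is applied consistently across create, internalBinaryRead, and internalBinaryWrite. A round-trip sketch against the new schema, with illustrative values and import paths relative to packages/cache/src:

import {CreateCacheEntryRequest} from './generated/results/api/v1/cache'
import {CacheMetadata} from './generated/results/entities/v1/cachemetadata'

const msg = CreateCacheEntryRequest.create({
  metadata: CacheMetadata.create({
    repositoryId: '123456', // int64 surfaces as a string (long_type_string)
    scope: [{scope: 'refs/heads/main', permission: '3'}] // Read | Write
  }),
  key: 'node-cache-linux-x64',
  version: 'abc123'
})
const bytes = CreateCacheEntryRequest.toBinary(msg) // metadata is written as tag 1, length-delimited
const copy = CreateCacheEntryRequest.fromBinary(bytes) // copy.key === 'node-cache-linux-x64'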

packages/cache/src/generated/results/entities/v1/cachemetadata.ts

@@ -0,0 +1,85 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cachemetadata.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheScope } from "./cachescope";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export interface CacheMetadata {
/**
* Backend repository id
*
* @generated from protobuf field: int64 repository_id = 1;
*/
repositoryId: string;
/**
* Scopes for the cache entry
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
*/
scope: CacheScope[];
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheMetadata$Type extends MessageType<CacheMetadata> {
constructor() {
super("github.actions.results.entities.v1.CacheMetadata", [
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheScope }
]);
}
create(value?: PartialMessage<CacheMetadata>): CacheMetadata {
const message = { repositoryId: "0", scope: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CacheMetadata>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 repository_id */ 1:
message.repositoryId = reader.int64().toString();
break;
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
message.scope.push(CacheScope.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 repository_id = 1; */
if (message.repositoryId !== "0")
writer.tag(1, WireType.Varint).int64(message.repositoryId);
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
for (let i = 0; i < message.scope.length; i++)
CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export const CacheMetadata = new CacheMetadata$Type();
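
Note the proto3 default elision in internalBinaryWrite above: a repositoryId of "0" writes nothing and an empty scope list writes nothing, so a freshly created CacheMetadata serializes to zero bytes. A quick check:

import {CacheMetadata} from './generated/results/entities/v1/cachemetadata'

const empty = CacheMetadata.create() // { repositoryId: '0', scope: [] }
console.log(CacheMetadata.toBinary(empty).byteLength) // 0 -- nothing hits the wire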

packages/cache/src/generated/results/entities/v1/cachescope.ts

@@ -0,0 +1,84 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cachescope.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheScope
*/
export interface CacheScope {
/**
* Determines the scope of the cache entry
*
* @generated from protobuf field: string scope = 1;
*/
scope: string;
/**
* None: 0 | Read: 1 | Write: 2 | All: (1|2)
*
* @generated from protobuf field: int64 permission = 2;
*/
permission: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheScope$Type extends MessageType<CacheScope> {
constructor() {
super("github.actions.results.entities.v1.CacheScope", [
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value?: PartialMessage<CacheScope>): CacheScope {
const message = { scope: "", permission: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CacheScope>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string scope */ 1:
message.scope = reader.string();
break;
case /* int64 permission */ 2:
message.permission = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string scope = 1; */
if (message.scope !== "")
writer.tag(1, WireType.LengthDelimited).string(message.scope);
/* int64 permission = 2; */
if (message.permission !== "0")
writer.tag(2, WireType.Varint).int64(message.permission);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
export const CacheScope = new CacheScope$Type();
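
Per the comment on the permission field, the value is a bitmask (None: 0, Read: 1, Write: 2, All: 3) that surfaces as a string because int64 maps to string under long_type_string. A small sketch of testing the bits, assuming a plain Number conversion (safe at these magnitudes):

import {CacheScope} from './generated/results/entities/v1/cachescope'

const s = CacheScope.create({scope: 'refs/heads/main', permission: '3'})
const canRead = (Number(s.permission) & 1) !== 0 // true
const canWrite = (Number(s.permission) & 2) !== 0 // true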

packages/cache/src/internal/cacheUtils.ts

@@ -7,7 +7,6 @@ import * as fs from 'fs'
import * as path from 'path'
import * as semver from 'semver'
import * as util from 'util'
import jwt_decode from 'jwt-decode'
import {
CacheFilename,
CompressionMethod,
@@ -179,71 +178,3 @@ export function getRuntimeToken(): string {
}
return token
}
export interface BackendIds {
workflowRunBackendId: string
workflowJobRunBackendId: string
}
interface ActionsToken {
scp: string
}
const InvalidJwtError = new Error(
'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
)
// uses the JWT token claims to get the
// workflow run and workflow job run backend ids
export function getBackendIdsFromToken(): BackendIds {
const token = getRuntimeToken()
const decoded = jwt_decode<ActionsToken>(token)
if (!decoded.scp) {
throw InvalidJwtError
}
/*
* example decoded:
* {
* scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
* }
*/
const scpParts = decoded.scp.split(' ')
if (scpParts.length === 0) {
throw InvalidJwtError
}
/*
* example scpParts:
* ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
*/
for (const scopes of scpParts) {
const scopeParts = scopes.split(':')
if (scopeParts?.[0] !== 'Actions.Results') {
// not the Actions.Results scope
continue
}
/*
* example scopeParts:
* ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
*/
if (scopeParts.length !== 3) {
// missing expected number of claims
throw InvalidJwtError
}
const ids = {
workflowRunBackendId: scopeParts[1],
workflowJobRunBackendId: scopeParts[2]
}
core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)
return ids
}
throw InvalidJwtError
}

packages/cache/src/internal/config.ts

@@ -1,11 +1,3 @@
export function getRuntimeToken(): string {
const token = process.env['ACTIONS_RUNTIME_TOKEN']
if (!token) {
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
}
return token
}
export function getCacheServiceVersion(): string {
return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'
}
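
getCacheServiceVersion treats ACTIONS_CACHE_SERVICE_V2 as a simple feature flag: any non-empty value selects v2. A sketch of the kind of dispatch this enables (the call sites are outside this diff, so the branch bodies here are placeholders):

import {getCacheServiceVersion} from './internal/config' // path as seen from src/cache.ts

if (getCacheServiceVersion() === 'v2') {
  // Twirp-backed path, e.g. restoreCacheV2 / saveCacheV2 above
} else {
  // legacy v1 cache HTTP client path
}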

packages/cache/src/internal/shared/cacheTwirpClient.ts

@@ -1,7 +1,8 @@
import {info, debug} from '@actions/core'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'
import {getRuntimeToken, getCacheServiceURL} from '../config'
import {getCacheServiceURL} from '../config'
import {getRuntimeToken} from '../cacheUtils'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp'
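
getRuntimeToken now lives in cacheUtils beside the other environment helpers, so the Twirp client imports it from there instead of from config. The token still ends up as a Bearer credential on the underlying HttpClient; a sketch of that composition (the client's actual constructor sits outside this hunk):

import {HttpClient} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {getRuntimeToken} from '../cacheUtils'
import {getUserAgentString} from './user-agent'

// ACTIONS_RUNTIME_TOKEN is attached to every request as a Bearer credential.
const http = new HttpClient(getUserAgentString(), [
  new BearerCredentialHandler(getRuntimeToken())
])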