diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index fdba186e..0530aaab 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -1,25 +1,27 @@
 import * as core from '@actions/core'
 import * as path from 'path'
 import * as utils from './internal/cacheUtils'
-import {CacheServiceVersion, CacheUrl} from './internal/constants'
+import { CacheServiceVersion, CacheUrl } from './internal/constants'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/cacheTwirpClient'
-import {createTar, extractTar, listTar} from './internal/tar'
-import {DownloadOptions, UploadOptions} from './options'
+import { createTar, extractTar, listTar } from './internal/tar'
+import { DownloadOptions, UploadOptions } from './options'
 import {
-  GetCacheBlobUploadURLRequest,
-  GetCacheBlobUploadURLResponse,
-  GetCachedBlobRequest,
-  GetCachedBlobResponse
-} from './generated/results/api/v1/blobcache'
-import {UploadCacheStream} from './internal/v2/upload-cache'
-import {StreamExtract} from './internal/v2/download-cache'
+  CreateCacheEntryRequest,
+  CreateCacheEntryResponse,
+  FinalizeCacheEntryUploadRequest,
+  FinalizeCacheEntryUploadResponse,
+  GetCacheEntryDownloadURLRequest,
+  GetCacheEntryDownloadURLResponse
+} from './generated/results/api/v1/cache'
+import { UploadCacheStream } from './internal/v2/upload-cache'
+import { StreamExtract } from './internal/v2/download-cache'
 import {
   UploadZipSpecification,
   getUploadZipSpecification
 } from '@actions/artifact/lib/internal/upload/upload-zip-specification'
-import {createZipUploadStream} from '@actions/artifact/lib/internal/upload/zip'
-import {getBackendIdsFromToken, BackendIds} from '@actions/artifact/lib/internal/shared/util'
+import { createZipUploadStream } from '@actions/artifact/lib/internal/upload/zip'
+import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util'
 
 export class ValidationError extends Error {
   constructor(message: string) {
@@ -193,7 +195,7 @@ async function restoreCachev2(
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ) {
-
+  restoreKeys = restoreKeys || []
 
   const keys = [primaryKey, ...restoreKeys]
 
@@ -212,24 +214,31 @@ async function restoreCachev2(
   try {
     // BackendIds are retrieved from the signed JWT
     const backendIds: BackendIds = getBackendIdsFromToken()
-    const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
-    const getSignedDownloadURLRequest: GetCachedBlobRequest = {
+    const compressionMethod = await utils.getCompressionMethod()
+    const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
+    const request: GetCacheEntryDownloadURLRequest = {
       workflowRunBackendId: backendIds.workflowRunBackendId,
       workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
-      keys: keys,
+      key: primaryKey,
+      restoreKeys: restoreKeys,
+      version: utils.getCacheVersion(
+        paths,
+        compressionMethod,
+        enableCrossOsArchive,
+      ),
     }
-    const signedDownloadURL: GetCachedBlobResponse = await twirpClient.GetCachedBlob(getSignedDownloadURLRequest)
-    core.info(`GetCachedBlobResponse: ${JSON.stringify(signedDownloadURL)}`)
+    const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request)
+    core.info(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`)
 
-    if (signedDownloadURL.blobs.length === 0) {
+    if (!response.ok) {
       // Cache not found
       core.warning(`Cache not found for keys: ${keys.join(', ')}`)
       return undefined
     }
 
-    core.info(`Cache hit for: ${signedDownloadURL.blobs[0].key}`)
+    core.info(`Cache hit for: ${request.key}`)
     core.info(`Starting download of artifact to: ${paths[0]}`)
-    await StreamExtract(signedDownloadURL.blobs[0].signedUrl, path.dirname(paths[0]))
+    await StreamExtract(response.signedDownloadUrl, path.dirname(paths[0]))
    core.info(`Artifact download completed successfully.`)
 
     return keys[0]
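For context on the new `version` request field: per the generated proto comments further down, it is a "hash of the compression tool, runner OS and paths cached". Below is a minimal sketch of how a `getCacheVersion`-style helper can derive that hash. This is a hypothetical simplification; the real helper lives in `./internal/cacheUtils` and may include additional components (for example, a version salt).

```typescript
import * as crypto from 'crypto'

// Sketch only: hash everything that should invalidate a cache entry.
function getCacheVersionSketch(
  paths: string[],
  compressionMethod?: string,
  enableCrossOsArchive = false
): string {
  const components = [...paths]
  // Different compression tools produce incompatible archives.
  if (compressionMethod) {
    components.push(compressionMethod)
  }
  // Unless cross-OS sharing is enabled, keep Windows caches separate.
  if (process.platform === 'win32' && !enableCrossOsArchive) {
    components.push('windows-only')
  }
  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}
```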
@@ -255,7 +264,7 @@ export async function saveCache(
 ): Promise<number> {
   checkPaths(paths)
   checkKey(key)
-
+  console.debug(`Cache Service Version: ${CacheServiceVersion}`)
   switch (CacheServiceVersion) {
     case "v2":
@@ -327,9 +336,9 @@ async function saveCachev1(
     } else if (reserveCacheResponse?.statusCode === 400) {
       throw new Error(
         reserveCacheResponse?.error?.message ??
-        `Cache size of ~${Math.round(
-          archiveFileSize / (1024 * 1024)
-        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+          `Cache size of ~${Math.round(
+            archiveFileSize / (1024 * 1024)
+          )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
       )
     } else {
       throw new ReserveCacheError(
@@ -368,15 +377,21 @@ async function saveCachev2(
 ): Promise<number> {
   // BackendIds are retrieved from the signed JWT
   const backendIds: BackendIds = getBackendIdsFromToken()
-  const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
-  const getSignedUploadURL: GetCacheBlobUploadURLRequest = {
+  const compressionMethod = await utils.getCompressionMethod()
+  const version = utils.getCacheVersion(
+    paths,
+    compressionMethod,
+    enableCrossOsArchive
+  )
+  const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
+  const request: CreateCacheEntryRequest = {
     workflowRunBackendId: backendIds.workflowRunBackendId,
     workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
-    organization: "github",
-    keys: [key],
+    key: key,
+    version: version
   }
-  const signedUploadURL: GetCacheBlobUploadURLResponse = await twirpClient.GetCacheBlobUploadURL(getSignedUploadURL)
-  core.info(`GetCacheBlobUploadURLResponse: ${JSON.stringify(signedUploadURL)}`)
+  const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request)
+  core.info(`CreateCacheEntryResponse: ${JSON.stringify(response)}`)
 
   // Archive
   // We're going to handle 1 path for now. This needs to be fixed to handle all
@@ -403,7 +418,20 @@ async function saveCachev2(
   // - getSignedUploadURL
   // - archivePath
   core.info(`Saving Cache v2: ${paths[0]}`)
-  await UploadCacheStream(signedUploadURL.urls[0].url, zipUploadStream)
+  await UploadCacheStream(response.signedUploadUrl, zipUploadStream)
+
+  // Finalize the cache entry
+  const finalizeRequest: FinalizeCacheEntryUploadRequest = {
+    workflowRunBackendId: backendIds.workflowRunBackendId,
+    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
+    key: key,
+    version: version,
+    // TODO: hard-coded placeholder; report the actual archive size in bytes
+    sizeBytes: "1024",
+  }
+
+  const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
+  core.info(`FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`)
 
   return 0
 }
\ No newline at end of file
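Taken together, the new v2 flow is: `CreateCacheEntry` for a signed upload URL, stream the archive with `UploadCacheStream`, then `FinalizeCacheEntryUpload`; on restore, `GetCacheEntryDownloadURL` followed by `StreamExtract`. A consumer-side sketch of the exported API under these changes; the paths and keys are illustrative, and it assumes the v2 service is selected via `CacheServiceVersion`:

```typescript
import * as cache from '@actions/cache'

async function example(): Promise<void> {
  const paths = ['node_modules']
  const primaryKey = `npm-${process.platform}-lockhash`
  const restoreKeys = [`npm-${process.platform}-`]

  // Save: creates the cache entry, streams the zip archive to the
  // signed upload URL, then finalizes the entry.
  await cache.saveCache(paths, primaryKey)

  // Restore: resolves a signed download URL for the primary key (or a
  // restore-key prefix) and extracts it; returns the matched key.
  const hitKey = await cache.restoreCache(paths, primaryKey, restoreKeys)
  if (hitKey === undefined) {
    console.log('cache miss')
  }
}
```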
diff --git a/packages/cache/src/generated/results/api/v1/blobcache.ts b/packages/cache/src/generated/results/api/v1/blobcache.ts
deleted file mode 100644
index 8e63bc63..00000000
--- a/packages/cache/src/generated/results/api/v1/blobcache.ts
+++ /dev/null
@@ -1,513 +0,0 @@
-// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
-// @generated from protobuf file "results/api/v1/blobcache.proto" (package "github.actions.results.api.v1", syntax proto3)
-// tslint:disable
-import { ServiceType } from "@protobuf-ts/runtime-rpc";
-import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
-import type { IBinaryWriter } from "@protobuf-ts/runtime";
-import { WireType } from "@protobuf-ts/runtime";
-import type { BinaryReadOptions } from "@protobuf-ts/runtime";
-import type { IBinaryReader } from "@protobuf-ts/runtime";
-import { UnknownFieldHandler } from "@protobuf-ts/runtime";
-import type { PartialMessage } from "@protobuf-ts/runtime";
-import { reflectionMergePartial } from "@protobuf-ts/runtime";
-import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
-import { MessageType } from "@protobuf-ts/runtime";
-import { Timestamp } from "../../../google/protobuf/timestamp";
-/**
- * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobRequest
- */
-export interface GetCachedBlobRequest {
-    /**
-     * Workflow run backend ID
-     *
-     * @generated from protobuf field: string workflow_run_backend_id = 1;
-     */
-    workflowRunBackendId: string;
-    /**
-     * Workflow job run backend ID
-     *
-     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
-     */
-    workflowJobRunBackendId: string;
-    /**
-     * Key(s) of te blob(s) to be retrieved
-     *
-     * @generated from protobuf field: repeated string keys = 3;
-     */
-    keys: string[];
-}
-/**
- * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse
- */
-export interface GetCachedBlobResponse {
-    /**
-     * List of blobs that match the requested keys
-     *
-     * @generated from protobuf field: repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1;
-     */
-    blobs: GetCachedBlobResponse_Blob[];
-}
-/**
- * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob
- */
-export interface GetCachedBlobResponse_Blob {
-    /**
-     * Key of the blob
-     *
-     * @generated from protobuf field: string key = 1;
-     */
-    key: string;
-    /**
-     * Download url for the cached blob
-     *
-     * @generated from protobuf field: string signed_url = 2;
-     */
-    signedUrl: string;
-    /**
-     * Version of the cached blob entry
-     *
-     * @generated from protobuf field: int32 version = 3;
-     */
-    version: number;
-    /**
-     * Checksum of the blob
-     *
-     * 
@generated from protobuf field: string checksum = 4; - */ - checksum: string; - /** - * Timestamp for when the blob cache entry expires - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 5; - */ - expiresAt?: Timestamp; - /** - * Timestamp for when the blob cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest - */ -export interface GetCacheBlobUploadURLRequest { - /** - * Workflow run backend ID - * - * @generated from protobuf field: string workflow_run_backend_id = 1; - */ - workflowRunBackendId: string; - /** - * Workflow job run backend ID - * - * @generated from protobuf field: string workflow_job_run_backend_id = 2; - */ - workflowJobRunBackendId: string; - /** - * / Owner of the blob(s) to be retrieved - * - * @generated from protobuf field: string organization = 3; - */ - organization: string; - /** - * Key(s) of te blob(s) to be retrieved - * - * @generated from protobuf field: repeated string keys = 4; - */ - keys: string[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse - */ -export interface GetCacheBlobUploadURLResponse { - /** - * List of upload URLs that match the requested keys - * - * @generated from protobuf field: repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1; - */ - urls: GetCacheBlobUploadURLResponse_Url[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url - */ -export interface GetCacheBlobUploadURLResponse_Url { - /** - * Key of the blob - * - * @generated from protobuf field: string key = 1; - */ - key: string; - /** - * URL to the blob - * - * @generated from protobuf field: string url = 2; - */ - url: string; -} -// @generated message type with reflection information, may provide speed optimized methods -class GetCachedBlobRequest$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCachedBlobRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): GetCachedBlobRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobRequest): GetCachedBlobRequest { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* repeated string keys */ 3: - message.keys.push(reader.string()); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* repeated string keys = 3; */ - for (let i = 0; i < message.keys.length; i++) - writer.tag(3, WireType.LengthDelimited).string(message.keys[i]); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobRequest - */ -export const GetCachedBlobRequest = new GetCachedBlobRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCachedBlobResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCachedBlobResponse", [ - { no: 1, name: "blobs", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCachedBlobResponse_Blob } - ]); - } - create(value?: PartialMessage): GetCachedBlobResponse { - const message = { blobs: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse): GetCachedBlobResponse { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs */ 1: - message.blobs.push(GetCachedBlobResponse_Blob.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCachedBlobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1; */ - for (let i = 0; i < message.blobs.length; i++) - GetCachedBlobResponse_Blob.internalBinaryWrite(message.blobs[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse - */ -export const GetCachedBlobResponse = new GetCachedBlobResponse$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCachedBlobResponse_Blob$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCachedBlobResponse.Blob", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }, - { no: 4, name: "checksum", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "expires_at", kind: "message", T: () => Timestamp }, - { no: 6, name: "created_at", kind: "message", T: () => Timestamp } - ]); - } - create(value?: PartialMessage): GetCachedBlobResponse_Blob { - const message = { key: "", signedUrl: "", version: 0, checksum: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse_Blob): GetCachedBlobResponse_Blob { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string signed_url */ 2: - message.signedUrl = reader.string(); - break; - case /* int32 version */ 3: - message.version = reader.int32(); - break; - case /* string checksum */ 4: - message.checksum = reader.string(); - break; - case /* google.protobuf.Timestamp expires_at */ 5: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCachedBlobResponse_Blob, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string signed_url = 2; */ - if (message.signedUrl !== "") - writer.tag(2, WireType.LengthDelimited).string(message.signedUrl); - /* int32 version = 3; */ - if (message.version !== 0) - writer.tag(3, WireType.Varint).int32(message.version); - /* string checksum = 4; */ - if (message.checksum !== "") - writer.tag(4, WireType.LengthDelimited).string(message.checksum); - /* google.protobuf.Timestamp expires_at = 5; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob - */ -export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCacheBlobUploadURLRequest$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [ - { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): GetCacheBlobUploadURLRequest { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLRequest): GetCacheBlobUploadURLRequest { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string workflow_run_backend_id */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string organization */ 3: - message.organization = reader.string(); - break; - case /* repeated string keys */ 4: - message.keys.push(reader.string()); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCacheBlobUploadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string workflow_run_backend_id = 1; */ - if (message.workflowRunBackendId !== "") - writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId); - /* string workflow_job_run_backend_id = 2; */ - if (message.workflowJobRunBackendId !== "") - writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId); - /* string organization = 3; */ - if (message.organization !== "") - writer.tag(3, WireType.LengthDelimited).string(message.organization); - /* repeated string keys = 4; */ - for (let i = 0; i < message.keys.length; i++) - writer.tag(4, WireType.LengthDelimited).string(message.keys[i]); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest - */ -export const GetCacheBlobUploadURLRequest = new GetCacheBlobUploadURLRequest$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCacheBlobUploadURLResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse", [ - { no: 1, name: "urls", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCacheBlobUploadURLResponse_Url } - ]); - } - create(value?: PartialMessage): GetCacheBlobUploadURLResponse { - const message = { urls: [] }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse): GetCacheBlobUploadURLResponse { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls */ 1: - message.urls.push(GetCacheBlobUploadURLResponse_Url.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCacheBlobUploadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1; */ - for (let i = 0; i < message.urls.length; i++) - GetCacheBlobUploadURLResponse_Url.internalBinaryWrite(message.urls[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse - */ -export const GetCacheBlobUploadURLResponse = new GetCacheBlobUploadURLResponse$Type(); -// @generated message type with reflection information, may provide speed optimized methods -class GetCacheBlobUploadURLResponse_Url$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } - ]); - } - create(value?: PartialMessage): GetCacheBlobUploadURLResponse_Url { - const message = { key: "", url: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse_Url): GetCacheBlobUploadURLResponse_Url { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string url */ 2: - message.url = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: GetCacheBlobUploadURLResponse_Url, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string url = 2; */ - if (message.url !== "") - writer.tag(2, WireType.LengthDelimited).string(message.url); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url - */ -export const GetCacheBlobUploadURLResponse_Url = new GetCacheBlobUploadURLResponse_Url$Type(); -/** - * @generated ServiceType for protobuf service github.actions.results.api.v1.BlobCacheService - */ -export const BlobCacheService = new ServiceType("github.actions.results.api.v1.BlobCacheService", [ - { name: "GetCachedBlob", options: {}, I: GetCachedBlobRequest, O: GetCachedBlobResponse }, - { name: "GetCacheBlobUploadURL", options: {}, I: GetCacheBlobUploadURLRequest, O: GetCacheBlobUploadURLResponse } -]); diff --git a/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts b/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts deleted file mode 100644 index c2f05e88..00000000 --- a/packages/cache/src/generated/results/api/v1/blobcache.twirp.ts +++ /dev/null @@ -1,433 +0,0 @@ -import { - TwirpContext, - TwirpServer, - RouterEvents, - TwirpError, - TwirpErrorCode, - Interceptor, - TwirpContentType, - chainInterceptors, -} from "twirp-ts"; -import { - GetCachedBlobRequest, - GetCachedBlobResponse, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse, -} from "./blobcache"; - -//==================================// -// Client Code // -//==================================// - -interface Rpc { - request( - service: string, - method: string, - contentType: "application/json" | "application/protobuf", - data: object | Uint8Array - ): Promise; -} - -export interface BlobCacheServiceClient { - GetCachedBlob(request: GetCachedBlobRequest): Promise; - GetCacheBlobUploadURL( - request: GetCacheBlobUploadURLRequest - ): Promise; -} - -export class BlobCacheServiceClientJSON implements BlobCacheServiceClient { - private readonly rpc: Rpc; - constructor(rpc: Rpc) { - this.rpc = rpc; - this.GetCachedBlob.bind(this); - this.GetCacheBlobUploadURL.bind(this); - } - GetCachedBlob(request: GetCachedBlobRequest): Promise { - const data = GetCachedBlobRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCachedBlob", - "application/json", - data as object - ); - return promise.then((data) => - GetCachedBlobResponse.fromJson(data as any, { ignoreUnknownFields: true }) - ); - } - - GetCacheBlobUploadURL( - request: GetCacheBlobUploadURLRequest - ): Promise { - const data = GetCacheBlobUploadURLRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false, - }); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCacheBlobUploadURL", - "application/json", - data as object - ); - return promise.then((data) => - GetCacheBlobUploadURLResponse.fromJson(data as any, { - ignoreUnknownFields: true, - }) - ); - } -} - -export class BlobCacheServiceClientProtobuf implements BlobCacheServiceClient { - private readonly rpc: Rpc; - constructor(rpc: Rpc) { - this.rpc = rpc; - this.GetCachedBlob.bind(this); - this.GetCacheBlobUploadURL.bind(this); - } - GetCachedBlob(request: GetCachedBlobRequest): Promise { - const data = GetCachedBlobRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCachedBlob", - "application/protobuf", - data - ); - return promise.then((data) => - GetCachedBlobResponse.fromBinary(data as Uint8Array) - ); - } - - 
GetCacheBlobUploadURL( - request: GetCacheBlobUploadURLRequest - ): Promise { - const data = GetCacheBlobUploadURLRequest.toBinary(request); - const promise = this.rpc.request( - "github.actions.results.api.v1.BlobCacheService", - "GetCacheBlobUploadURL", - "application/protobuf", - data - ); - return promise.then((data) => - GetCacheBlobUploadURLResponse.fromBinary(data as Uint8Array) - ); - } -} - -//==================================// -// Server Code // -//==================================// - -export interface BlobCacheServiceTwirp { - GetCachedBlob( - ctx: T, - request: GetCachedBlobRequest - ): Promise; - GetCacheBlobUploadURL( - ctx: T, - request: GetCacheBlobUploadURLRequest - ): Promise; -} - -export enum BlobCacheServiceMethod { - GetCachedBlob = "GetCachedBlob", - GetCacheBlobUploadURL = "GetCacheBlobUploadURL", -} - -export const BlobCacheServiceMethodList = [ - BlobCacheServiceMethod.GetCachedBlob, - BlobCacheServiceMethod.GetCacheBlobUploadURL, -]; - -export function createBlobCacheServiceServer< - T extends TwirpContext = TwirpContext ->(service: BlobCacheServiceTwirp) { - return new TwirpServer({ - service, - packageName: "github.actions.results.api.v1", - serviceName: "BlobCacheService", - methodList: BlobCacheServiceMethodList, - matchRoute: matchBlobCacheServiceRoute, - }); -} - -function matchBlobCacheServiceRoute( - method: string, - events: RouterEvents -) { - switch (method) { - case "GetCachedBlob": - return async ( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCachedBlobRequest, - GetCachedBlobResponse - >[] - ) => { - ctx = { ...ctx, methodName: "GetCachedBlob" }; - await events.onMatch(ctx); - return handleBlobCacheServiceGetCachedBlobRequest( - ctx, - service, - data, - interceptors - ); - }; - case "GetCacheBlobUploadURL": - return async ( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] - ) => { - ctx = { ...ctx, methodName: "GetCacheBlobUploadURL" }; - await events.onMatch(ctx); - return handleBlobCacheServiceGetCacheBlobUploadURLRequest( - ctx, - service, - data, - interceptors - ); - }; - default: - events.onNotFound(); - const msg = `no handler found`; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} - -function handleBlobCacheServiceGetCachedBlobRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleBlobCacheServiceGetCachedBlobJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleBlobCacheServiceGetCachedBlobProtobuf( - ctx, - service, - data, - interceptors - ); - default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} - -function handleBlobCacheServiceGetCacheBlobUploadURLRequest< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] -): Promise { - switch (ctx.contentType) { - case TwirpContentType.JSON: - return handleBlobCacheServiceGetCacheBlobUploadURLJSON( - ctx, - service, - data, - interceptors - ); - case TwirpContentType.Protobuf: - return handleBlobCacheServiceGetCacheBlobUploadURLProtobuf( - ctx, - service, - data, - interceptors - ); 
- default: - const msg = "unexpected Content-Type"; - throw new TwirpError(TwirpErrorCode.BadRoute, msg); - } -} -async function handleBlobCacheServiceGetCachedBlobJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor[] -) { - let request: GetCachedBlobRequest; - let response: GetCachedBlobResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = GetCachedBlobRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCachedBlobRequest, - GetCachedBlobResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCachedBlob(ctx, inputReq); - }); - } else { - response = await service.GetCachedBlob(ctx, request!); - } - - return JSON.stringify( - GetCachedBlobResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false, - }) as string - ); -} - -async function handleBlobCacheServiceGetCacheBlobUploadURLJSON< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] -) { - let request: GetCacheBlobUploadURLRequest; - let response: GetCacheBlobUploadURLResponse; - - try { - const body = JSON.parse(data.toString() || "{}"); - request = GetCacheBlobUploadURLRequest.fromJson(body, { - ignoreUnknownFields: true, - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCacheBlobUploadURL(ctx, inputReq); - }); - } else { - response = await service.GetCacheBlobUploadURL(ctx, request!); - } - - return JSON.stringify( - GetCacheBlobUploadURLResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false, - }) as string - ); -} -async function handleBlobCacheServiceGetCachedBlobProtobuf< - T extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor[] -) { - let request: GetCachedBlobRequest; - let response: GetCachedBlobResponse; - - try { - request = GetCachedBlobRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCachedBlobRequest, - GetCachedBlobResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCachedBlob(ctx, inputReq); - }); - } else { - response = await service.GetCachedBlob(ctx, request!); - } - - return Buffer.from(GetCachedBlobResponse.toBinary(response)); -} - -async function handleBlobCacheServiceGetCacheBlobUploadURLProtobuf< - T 
extends TwirpContext = TwirpContext ->( - ctx: T, - service: BlobCacheServiceTwirp, - data: Buffer, - interceptors?: Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >[] -) { - let request: GetCacheBlobUploadURLRequest; - let response: GetCacheBlobUploadURLResponse; - - try { - request = GetCacheBlobUploadURLRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - - if (interceptors && interceptors.length > 0) { - const interceptor = chainInterceptors(...interceptors) as Interceptor< - T, - GetCacheBlobUploadURLRequest, - GetCacheBlobUploadURLResponse - >; - response = await interceptor(ctx, request!, (ctx, inputReq) => { - return service.GetCacheBlobUploadURL(ctx, inputReq); - }); - } else { - response = await service.GetCacheBlobUploadURL(ctx, request!); - } - - return Buffer.from(GetCacheBlobUploadURLResponse.toBinary(response)); -} diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts new file mode 100644 index 00000000..f7686fbd --- /dev/null +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -0,0 +1,1324 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3) +// tslint:disable +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +/** + * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest + */ +export interface CreateCacheEntryRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 4; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse + */ +export interface CreateCacheEntryResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * SAS URL to upload the cache archive + * + * @generated from protobuf field: string signed_upload_url = 2; + */ + signedUploadUrl: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest + */ +export interface FinalizeCacheEntryUploadRequest { + /** + * Workflow run backend ID + * + * 
@generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Size of the cache archive in Bytes + * + * @generated from protobuf field: int64 size_bytes = 4; + */ + sizeBytes: string; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 5; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse + */ +export interface FinalizeCacheEntryUploadResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * Cache entry database ID + * + * @generated from protobuf field: int64 entry_id = 2; + */ + entryId: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest + */ +export interface GetCacheEntryDownloadURLRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Restore keys used for prefix searching + * + * @generated from protobuf field: repeated string restore_keys = 4; + */ + restoreKeys: string[]; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 5; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse + */ +export interface GetCacheEntryDownloadURLResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * SAS URL to download the cache archive + * + * @generated from protobuf field: string signed_download_url = 2; + */ + signedDownloadUrl: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest + */ +export interface DeleteCacheEntryRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse + */ +export interface DeleteCacheEntryResponse { + /** + * @generated from protobuf field: bool ok = 1; + */ + ok: boolean; + /** + * Cache entry database ID + * + * @generated from protobuf field: int64 entry_id = 2; + */ + entryId: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesRequest + */ +export interface ListCacheEntriesRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run 
backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Restore keys used for prefix searching + * + * @generated from protobuf field: repeated string restore_keys = 4; + */ + restoreKeys: string[]; +} +/** + * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse + */ +export interface ListCacheEntriesResponse { + /** + * @generated from protobuf field: repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; + */ + entries: ListCacheEntriesResponse_CacheEntry[]; +} +/** + * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry + */ +export interface ListCacheEntriesResponse_CacheEntry { + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * SHA256 hex digest of the cache archive + * + * @generated from protobuf field: string hash = 2; + */ + hash: string; + /** + * Cache entry size in bytes + * + * @generated from protobuf field: int64 size_bytes = 3; + */ + sizeBytes: string; + /** + * Access scope + * + * @generated from protobuf field: string scope = 4; + */ + scope: string; + /** + * Version SHA256 hex digest + * + * @generated from protobuf field: string version = 5; + */ + version: string; + /** + * When the cache entry was created + * + * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; + */ + createdAt?: Timestamp; + /** + * When the cache entry was last accessed + * + * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; + */ + lastAccessedAt?: Timestamp; + /** + * When the cache entry is set to expire + * + * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; + */ + expiresAt?: Timestamp; +} +/** + * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest + */ +export interface LookupCacheEntryRequest { + /** + * Workflow run backend ID + * + * @generated from protobuf field: string workflow_run_backend_id = 1; + */ + workflowRunBackendId: string; + /** + * Workflow job run backend ID + * + * @generated from protobuf field: string workflow_job_run_backend_id = 2; + */ + workflowJobRunBackendId: string; + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 3; + */ + key: string; + /** + * Restore keys used for prefix searching + * + * @generated from protobuf field: repeated string restore_keys = 4; + */ + restoreKeys: string[]; + /** + * Hash of the compression tool, runner OS and paths cached + * + * @generated from protobuf field: string version = 5; + */ + version: string; +} +/** + * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse + */ +export interface LookupCacheEntryResponse { + /** + * Indicates whether the cache entry exists or not + * + * @generated from protobuf field: bool exists = 1; + */ + exists: boolean; +} +/** + * Matched cache entry metadata + * + * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry + */ +export interface LookupCacheEntryResponse_CacheEntry { + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * SHA256 hex digest of the cache archive + * + * @generated 
from protobuf field: string hash = 2; + */ + hash: string; + /** + * Cache entry size in bytes + * + * @generated from protobuf field: int64 size_bytes = 3; + */ + sizeBytes: string; + /** + * Access scope + * + * @generated from protobuf field: string scope = 4; + */ + scope: string; + /** + * Version SHA256 hex digest + * + * @generated from protobuf field: string version = 5; + */ + version: string; + /** + * When the cache entry was created + * + * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; + */ + createdAt?: Timestamp; + /** + * When the cache entry was last accessed + * + * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; + */ + lastAccessedAt?: Timestamp; + /** + * When the cache entry is set to expire + * + * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; + */ + expiresAt?: Timestamp; +} +// @generated message type with reflection information, may provide speed optimized methods +class CreateCacheEntryRequest$Type extends MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryRequest", [ + { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CreateCacheEntryRequest { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string key */ 3: + message.key = reader.string(); + break; + case /* string version */ 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
+                    UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        /* string version = 4; */
+        if (message.version !== "")
+            writer.tag(4, WireType.LengthDelimited).string(message.version);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
+ */
+export const CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.CreateCacheEntryResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse {
+        const message = { ok: false, signedUploadUrl: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<CreateCacheEntryResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_upload_url */ 2:
+                    message.signedUploadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* string signed_upload_url = 2; */
+        if (message.signedUploadUrl !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
+ */
+export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 4, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest {
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", sizeBytes: "0", version: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<FinalizeCacheEntryUploadRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string workflow_job_run_backend_id */ 2:
+                    message.workflowJobRunBackendId = reader.string();
+                    break;
+                case /* string key */ 3:
+                    message.key = reader.string();
+                    break;
+                case /* int64 size_bytes */ 4:
+                    message.sizeBytes = reader.int64().toString();
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        /* int64 size_bytes = 4; */
+        if (message.sizeBytes !== "0")
+            writer.tag(4, WireType.Varint).int64(message.sizeBytes);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, WireType.LengthDelimited).string(message.version);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
+ */
+export const FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse {
+        const message = { ok: false, entryId: "0" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<FinalizeCacheEntryUploadResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 entry_id */ 2:
+                    message.entryId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* int64 entry_id = 2; */
+        if (message.entryId !== "0")
+            writer.tag(2, WireType.Varint).int64(message.entryId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
+ */
+export const FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest {
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<GetCacheEntryDownloadURLRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string workflow_job_run_backend_id */ 2:
+                    message.workflowJobRunBackendId = reader.string();
+                    break;
+                case /* string key */ 3:
+                    message.key = reader.string();
+                    break;
+                case /* repeated string restore_keys */ 4:
+                    message.restoreKeys.push(reader.string());
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        /* repeated string restore_keys = 4; */
+        for (let i = 0; i < message.restoreKeys.length; i++)
+            writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, WireType.LengthDelimited).string(message.version);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
+ */
+export const GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse {
+        const message = { ok: false, signedDownloadUrl: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<GetCacheEntryDownloadURLResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_download_url */ 2:
+                    message.signedDownloadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* string signed_download_url = 2; */
+        if (message.signedDownloadUrl !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.signedDownloadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
+ */
+export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest {
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<DeleteCacheEntryRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string workflow_job_run_backend_id */ 2:
+                    message.workflowJobRunBackendId = reader.string();
+                    break;
+                case /* string key */ 3:
+                    message.key = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
+ */
+export const DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class DeleteCacheEntryResponse$Type extends MessageType<DeleteCacheEntryResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.DeleteCacheEntryResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value?: PartialMessage<DeleteCacheEntryResponse>): DeleteCacheEntryResponse {
+        const message = { ok: false, entryId: "0" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<DeleteCacheEntryResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 entry_id */ 2:
+                    message.entryId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* int64 entry_id = 2; */
+        if (message.entryId !== "0")
+            writer.tag(2, WireType.Varint).int64(message.entryId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
+ */
+export const DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.ListCacheEntriesRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<ListCacheEntriesRequest>): ListCacheEntriesRequest {
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [] };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<ListCacheEntriesRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string workflow_job_run_backend_id */ 2:
+                    message.workflowJobRunBackendId = reader.string();
+                    break;
+                case /* string key */ 3:
+                    message.key = reader.string();
+                    break;
+                case /* repeated string restore_keys */ 4:
+                    message.restoreKeys.push(reader.string());
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        /* repeated string restore_keys = 4; */
+        for (let i = 0; i < message.restoreKeys.length; i++)
+            writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
+ */
+export const ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.ListCacheEntriesResponse", [
+            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListCacheEntriesResponse_CacheEntry }
+        ]);
+    }
+    create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse {
+        const message = { entries: [] };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<ListCacheEntriesResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1:
+                    message.entries.push(ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; */
+        for (let i = 0; i < message.entries.length; i++)
+            ListCacheEntriesResponse_CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
+ */
+export const ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class ListCacheEntriesResponse_CacheEntry$Type extends MessageType<ListCacheEntriesResponse_CacheEntry> {
+    constructor() {
+        super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [
+            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
+            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
+            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
+        ]);
+    }
+    create(value?: PartialMessage<ListCacheEntriesResponse_CacheEntry>): ListCacheEntriesResponse_CacheEntry {
+        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<ListCacheEntriesResponse_CacheEntry>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse_CacheEntry): ListCacheEntriesResponse_CacheEntry {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string key */ 1:
+                    message.key = reader.string();
+                    break;
+                case /* string hash */ 2:
+                    message.hash = reader.string();
+                    break;
+                case /* int64 size_bytes */ 3:
+                    message.sizeBytes = reader.int64().toString();
+                    break;
+                case /* string scope */ 4:
+                    message.scope = reader.string();
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp created_at */ 6:
+                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
+                    break;
+                case /* google.protobuf.Timestamp last_accessed_at */ 7:
+                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 8:
+                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: ListCacheEntriesResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string key = 1; */
+        if (message.key !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.key);
+        /* string hash = 2; */
+        if (message.hash !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.hash);
+        /* int64 size_bytes = 3; */
+        if (message.sizeBytes !== "0")
+            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
+        /* string scope = 4; */
+        if (message.scope !== "")
+            writer.tag(4, WireType.LengthDelimited).string(message.scope);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, WireType.LengthDelimited).string(message.version);
+        /* google.protobuf.Timestamp created_at = 6; */
+        if (message.createdAt)
+            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp last_accessed_at = 7; */
+        if (message.lastAccessedAt)
+            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp expires_at = 8; */
+        if (message.expiresAt)
+            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
+ */
+export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.LookupCacheEntryRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 4, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<LookupCacheEntryRequest>): LookupCacheEntryRequest {
+        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", key: "", restoreKeys: [], version: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<LookupCacheEntryRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string workflow_job_run_backend_id */ 2:
+                    message.workflowJobRunBackendId = reader.string();
+                    break;
+                case /* string key */ 3:
+                    message.key = reader.string();
+                    break;
+                case /* repeated string restore_keys */ 4:
+                    message.restoreKeys.push(reader.string());
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string workflow_job_run_backend_id = 2; */
+        if (message.workflowJobRunBackendId !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+        /* string key = 3; */
+        if (message.key !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.key);
+        /* repeated string restore_keys = 4; */
+        for (let i = 0; i < message.restoreKeys.length; i++)
+            writer.tag(4, WireType.LengthDelimited).string(message.restoreKeys[i]);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, WireType.LengthDelimited).string(message.version);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
+ */
+export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.LookupCacheEntryResponse", [
+            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+        ]);
+    }
+    create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse {
+        const message = { exists: false };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<LookupCacheEntryResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool exists */ 1:
+                    message.exists = reader.bool();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool exists = 1; */
+        if (message.exists !== false)
+            writer.tag(1, WireType.Varint).bool(message.exists);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
+ */
+export const LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
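For reviewers unfamiliar with protobuf-ts output: each `export const` above is a `MessageType` instance, so messages are plain objects that you create and serialize through the type object. A minimal usage sketch (illustrative only, not part of the generated file; the key values are made up):

// create() fills in proto3 defaults; toJson/fromJson mirror the options the
// Twirp JSON client below uses (snake_case field names, defaults omitted).
const req = LookupCacheEntryRequest.create({ key: "npm-deps", restoreKeys: ["npm-"] });
const wire = LookupCacheEntryRequest.toJson(req, { useProtoFieldName: true });
const back = LookupCacheEntryRequest.fromJson(wire, { ignoreUnknownFields: true });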
+// @generated message type with reflection information, may provide speed optimized methods
+class LookupCacheEntryResponse_CacheEntry$Type extends MessageType<LookupCacheEntryResponse_CacheEntry> {
+    constructor() {
+        super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
+            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
+            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
+            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
+        ]);
+    }
+    create(value?: PartialMessage<LookupCacheEntryResponse_CacheEntry>): LookupCacheEntryResponse_CacheEntry {
+        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<LookupCacheEntryResponse_CacheEntry>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse_CacheEntry): LookupCacheEntryResponse_CacheEntry {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string key */ 1:
+                    message.key = reader.string();
+                    break;
+                case /* string hash */ 2:
+                    message.hash = reader.string();
+                    break;
+                case /* int64 size_bytes */ 3:
+                    message.sizeBytes = reader.int64().toString();
+                    break;
+                case /* string scope */ 4:
+                    message.scope = reader.string();
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp created_at */ 6:
+                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
+                    break;
+                case /* google.protobuf.Timestamp last_accessed_at */ 7:
+                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 8:
+                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: LookupCacheEntryResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string key = 1; */
+        if (message.key !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.key);
+        /* string hash = 2; */
+        if (message.hash !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.hash);
+        /* int64 size_bytes = 3; */
+        if (message.sizeBytes !== "0")
+            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
+        /* string scope = 4; */
+        if (message.scope !== "")
+            writer.tag(4, WireType.LengthDelimited).string(message.scope);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, WireType.LengthDelimited).string(message.version);
+        /* google.protobuf.Timestamp created_at = 6; */
+        if (message.createdAt)
+            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp last_accessed_at = 7; */
+        if (message.lastAccessedAt)
+            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp expires_at = 8; */
+        if (message.expiresAt)
+            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
+ */
+export const LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
+/**
+ * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
+ */
+export const CacheService = new ServiceType("github.actions.results.api.v1.CacheService", [
+    { name: "CreateCacheEntry", options: {}, I: CreateCacheEntryRequest, O: CreateCacheEntryResponse },
+    { name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse },
+    { name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse },
+    { name: "DeleteCacheEntry", options: {}, I: DeleteCacheEntryRequest, O: DeleteCacheEntryResponse },
+    { name: "ListCacheEntries", options: {}, I: ListCacheEntriesRequest, O: ListCacheEntriesResponse },
+    { name: "LookupCacheEntry", options: {}, I: LookupCacheEntryRequest, O: LookupCacheEntryResponse }
+]);
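The second generated file, below, wires these message types into Twirp client and server plumbing. The protobuf client variant there serializes through the same type objects; a small sketch of the binary round-trip (assumed usage, not part of the diff):

import { CreateCacheEntryRequest } from "./cache";

// toBinary/fromBinary are the codecs internalBinaryWrite/internalBinaryRead back.
const bytes = CreateCacheEntryRequest.toBinary(
  CreateCacheEntryRequest.create({ key: "node-modules", version: "v1" })
);
const decoded = CreateCacheEntryRequest.fromBinary(bytes); // round-trips losslessly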
diff --git a/packages/cache/src/generated/results/api/v1/cache.twirp.ts b/packages/cache/src/generated/results/api/v1/cache.twirp.ts
new file mode 100644
index 00000000..c8f1f633
--- /dev/null
+++ b/packages/cache/src/generated/results/api/v1/cache.twirp.ts
@@ -0,0 +1,1209 @@
+import {
+  TwirpContext,
+  TwirpServer,
+  RouterEvents,
+  TwirpError,
+  TwirpErrorCode,
+  Interceptor,
+  TwirpContentType,
+  chainInterceptors,
+} from "twirp-ts";
+import {
+  CreateCacheEntryRequest,
+  CreateCacheEntryResponse,
+  FinalizeCacheEntryUploadRequest,
+  FinalizeCacheEntryUploadResponse,
+  GetCacheEntryDownloadURLRequest,
+  GetCacheEntryDownloadURLResponse,
+  DeleteCacheEntryRequest,
+  DeleteCacheEntryResponse,
+  ListCacheEntriesRequest,
+  ListCacheEntriesResponse,
+  LookupCacheEntryRequest,
+  LookupCacheEntryResponse,
+} from "./cache";
+
+//==================================//
+//          Client Code             //
+//==================================//
+
+interface Rpc {
+  request(
+    service: string,
+    method: string,
+    contentType: "application/json" | "application/protobuf",
+    data: object | Uint8Array
+  ): Promise<object | Uint8Array>;
+}
+
+export interface CacheServiceClient {
+  CreateCacheEntry(
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse>;
+  FinalizeCacheEntryUpload(
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse>;
+  GetCacheEntryDownloadURL(
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse>;
+  DeleteCacheEntry(
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse>;
+  ListCacheEntries(
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse>;
+  LookupCacheEntry(
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse>;
+}
+
+export class CacheServiceClientJSON implements CacheServiceClient {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.CreateCacheEntry.bind(this);
+    this.FinalizeCacheEntryUpload.bind(this);
+    this.GetCacheEntryDownloadURL.bind(this);
+    this.DeleteCacheEntry.bind(this);
+    this.ListCacheEntries.bind(this);
+    this.LookupCacheEntry.bind(this);
+  }
+  CreateCacheEntry(
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse> {
+    const data = CreateCacheEntryRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "CreateCacheEntry",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      CreateCacheEntryResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  FinalizeCacheEntryUpload(
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse> {
+    const data = FinalizeCacheEntryUploadRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "FinalizeCacheEntryUpload",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      FinalizeCacheEntryUploadResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  GetCacheEntryDownloadURL(
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse> {
+    const data = GetCacheEntryDownloadURLRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "GetCacheEntryDownloadURL",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      GetCacheEntryDownloadURLResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  DeleteCacheEntry(
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse> {
+    const data = DeleteCacheEntryRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "DeleteCacheEntry",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      DeleteCacheEntryResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  ListCacheEntries(
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse> {
+    const data = ListCacheEntriesRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "ListCacheEntries",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      ListCacheEntriesResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+
+  LookupCacheEntry(
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse> {
+    const data = LookupCacheEntryRequest.toJson(request, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    });
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "LookupCacheEntry",
+      "application/json",
+      data as object
+    );
+    return promise.then((data) =>
+      LookupCacheEntryResponse.fromJson(data as any, {
+        ignoreUnknownFields: true,
+      })
+    );
+  }
+}
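`CacheServiceClientJSON` only needs an object satisfying the (non-exported) `Rpc` interface; the transport is caller-supplied. A hedged sketch using Node 18's global fetch, with the base-URL env var and routing path as assumptions (standard Twirp routing is POST /twirp/<package>.<Service>/<Method>):

const rpc = {
  async request(
    service: string,
    method: string,
    contentType: "application/json" | "application/protobuf",
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const res = await fetch(
      `${process.env.ACTIONS_RESULTS_URL}/twirp/${service}/${method}`,
      {
        method: "POST",
        headers: { "Content-Type": contentType },
        body:
          contentType === "application/json"
            ? JSON.stringify(data)
            : Buffer.from(data as Uint8Array),
      }
    );
    return (await res.json()) as object;
  },
};

const client = new CacheServiceClientJSON(rpc);
// client.GetCacheEntryDownloadURL({...}) now resolves to a typed response.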
+export class CacheServiceClientProtobuf implements CacheServiceClient {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.CreateCacheEntry.bind(this);
+    this.FinalizeCacheEntryUpload.bind(this);
+    this.GetCacheEntryDownloadURL.bind(this);
+    this.DeleteCacheEntry.bind(this);
+    this.ListCacheEntries.bind(this);
+    this.LookupCacheEntry.bind(this);
+  }
+  CreateCacheEntry(
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse> {
+    const data = CreateCacheEntryRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "CreateCacheEntry",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      CreateCacheEntryResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  FinalizeCacheEntryUpload(
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse> {
+    const data = FinalizeCacheEntryUploadRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "FinalizeCacheEntryUpload",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      FinalizeCacheEntryUploadResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  GetCacheEntryDownloadURL(
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse> {
+    const data = GetCacheEntryDownloadURLRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "GetCacheEntryDownloadURL",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      GetCacheEntryDownloadURLResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  DeleteCacheEntry(
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse> {
+    const data = DeleteCacheEntryRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "DeleteCacheEntry",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      DeleteCacheEntryResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  ListCacheEntries(
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse> {
+    const data = ListCacheEntriesRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "ListCacheEntries",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      ListCacheEntriesResponse.fromBinary(data as Uint8Array)
+    );
+  }
+
+  LookupCacheEntry(
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse> {
+    const data = LookupCacheEntryRequest.toBinary(request);
+    const promise = this.rpc.request(
+      "github.actions.results.api.v1.CacheService",
+      "LookupCacheEntry",
+      "application/protobuf",
+      data
+    );
+    return promise.then((data) =>
+      LookupCacheEntryResponse.fromBinary(data as Uint8Array)
+    );
+  }
+}
+
+//==================================//
+//          Server Code             //
+//==================================//
+
+export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
+  CreateCacheEntry(
+    ctx: T,
+    request: CreateCacheEntryRequest
+  ): Promise<CreateCacheEntryResponse>;
+  FinalizeCacheEntryUpload(
+    ctx: T,
+    request: FinalizeCacheEntryUploadRequest
+  ): Promise<FinalizeCacheEntryUploadResponse>;
+  GetCacheEntryDownloadURL(
+    ctx: T,
+    request: GetCacheEntryDownloadURLRequest
+  ): Promise<GetCacheEntryDownloadURLResponse>;
+  DeleteCacheEntry(
+    ctx: T,
+    request: DeleteCacheEntryRequest
+  ): Promise<DeleteCacheEntryResponse>;
+  ListCacheEntries(
+    ctx: T,
+    request: ListCacheEntriesRequest
+  ): Promise<ListCacheEntriesResponse>;
+  LookupCacheEntry(
+    ctx: T,
+    request: LookupCacheEntryRequest
+  ): Promise<LookupCacheEntryResponse>;
+}
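On the server side, a handler object implements `CacheServiceTwirp`. The sketch below stubs every method with canned responses (illustrative only; a real backend would consult storage and mint signed URLs):

const service: CacheServiceTwirp = {
  async CreateCacheEntry(ctx, req) {
    return CreateCacheEntryResponse.create({ ok: true, signedUploadUrl: "https://blob.example/upload" });
  },
  async FinalizeCacheEntryUpload(ctx, req) {
    return FinalizeCacheEntryUploadResponse.create({ ok: true, entryId: "42" });
  },
  async GetCacheEntryDownloadURL(ctx, req) {
    return GetCacheEntryDownloadURLResponse.create({ ok: false }); // cache miss
  },
  async DeleteCacheEntry(ctx, req) {
    return DeleteCacheEntryResponse.create({ ok: true, entryId: "42" });
  },
  async ListCacheEntries(ctx, req) {
    return ListCacheEntriesResponse.create({ entries: [] });
  },
  async LookupCacheEntry(ctx, req) {
    return LookupCacheEntryResponse.create({ exists: false });
  },
};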
+export enum CacheServiceMethod {
+  CreateCacheEntry = "CreateCacheEntry",
+  FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
+  GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
+  DeleteCacheEntry = "DeleteCacheEntry",
+  ListCacheEntries = "ListCacheEntries",
+  LookupCacheEntry = "LookupCacheEntry",
+}
+
+export const CacheServiceMethodList = [
+  CacheServiceMethod.CreateCacheEntry,
+  CacheServiceMethod.FinalizeCacheEntryUpload,
+  CacheServiceMethod.GetCacheEntryDownloadURL,
+  CacheServiceMethod.DeleteCacheEntry,
+  CacheServiceMethod.ListCacheEntries,
+  CacheServiceMethod.LookupCacheEntry,
+];
+
+export function createCacheServiceServer<T extends TwirpContext = TwirpContext>(
+  service: CacheServiceTwirp<T>
+) {
+  return new TwirpServer<CacheServiceTwirp, T>({
+    service,
+    packageName: "github.actions.results.api.v1",
+    serviceName: "CacheService",
+    methodList: CacheServiceMethodList,
+    matchRoute: matchCacheServiceRoute,
+  });
+}
+
+function matchCacheServiceRoute<T extends TwirpContext = TwirpContext>(
+  method: string,
+  events: RouterEvents<T>
+) {
+  switch (method) {
+    case "CreateCacheEntry":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          CreateCacheEntryRequest,
+          CreateCacheEntryResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "CreateCacheEntry" };
+        await events.onMatch(ctx);
+        return handleCacheServiceCreateCacheEntryRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "FinalizeCacheEntryUpload":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          FinalizeCacheEntryUploadRequest,
+          FinalizeCacheEntryUploadResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "FinalizeCacheEntryUpload" };
+        await events.onMatch(ctx);
+        return handleCacheServiceFinalizeCacheEntryUploadRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "GetCacheEntryDownloadURL":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          GetCacheEntryDownloadURLRequest,
+          GetCacheEntryDownloadURLResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "GetCacheEntryDownloadURL" };
+        await events.onMatch(ctx);
+        return handleCacheServiceGetCacheEntryDownloadURLRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "DeleteCacheEntry":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          DeleteCacheEntryRequest,
+          DeleteCacheEntryResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "DeleteCacheEntry" };
+        await events.onMatch(ctx);
+        return handleCacheServiceDeleteCacheEntryRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "ListCacheEntries":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          ListCacheEntriesRequest,
+          ListCacheEntriesResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "ListCacheEntries" };
+        await events.onMatch(ctx);
+        return handleCacheServiceListCacheEntriesRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    case "LookupCacheEntry":
+      return async (
+        ctx: T,
+        service: CacheServiceTwirp,
+        data: Buffer,
+        interceptors?: Interceptor<
+          T,
+          LookupCacheEntryRequest,
+          LookupCacheEntryResponse
+        >[]
+      ) => {
+        ctx = { ...ctx, methodName: "LookupCacheEntry" };
+        await events.onMatch(ctx);
+        return handleCacheServiceLookupCacheEntryRequest<T>(
+          ctx,
+          service,
+          data,
+          interceptors
+        );
+      };
+    default:
+      events.onNotFound();
+      const msg = `no handler found`;
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
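`createCacheServiceServer` wraps that handler in a twirp-ts `TwirpServer`, which (to the best of my reading of the twirp-ts API) exposes an `httpHandler()` that plugs straight into Node's http module:

import * as http from "http";

// Continuing the sketch above: serve the CacheService over plain HTTP.
const twirpServer = createCacheServiceServer(service);
http.createServer(twirpServer.httpHandler()).listen(8080);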
+
+function handleCacheServiceCreateCacheEntryRequest<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    CreateCacheEntryRequest,
+    CreateCacheEntryResponse
+  >[]
+): Promise<string | Uint8Array> {
+  switch (ctx.contentType) {
+    case TwirpContentType.JSON:
+      return handleCacheServiceCreateCacheEntryJSON<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    case TwirpContentType.Protobuf:
+      return handleCacheServiceCreateCacheEntryProtobuf<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    default:
+      const msg = "unexpected Content-Type";
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
+
+function handleCacheServiceFinalizeCacheEntryUploadRequest<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    FinalizeCacheEntryUploadRequest,
+    FinalizeCacheEntryUploadResponse
+  >[]
+): Promise<string | Uint8Array> {
+  switch (ctx.contentType) {
+    case TwirpContentType.JSON:
+      return handleCacheServiceFinalizeCacheEntryUploadJSON<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    case TwirpContentType.Protobuf:
+      return handleCacheServiceFinalizeCacheEntryUploadProtobuf<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    default:
+      const msg = "unexpected Content-Type";
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
+
+function handleCacheServiceGetCacheEntryDownloadURLRequest<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    GetCacheEntryDownloadURLRequest,
+    GetCacheEntryDownloadURLResponse
+  >[]
+): Promise<string | Uint8Array> {
+  switch (ctx.contentType) {
+    case TwirpContentType.JSON:
+      return handleCacheServiceGetCacheEntryDownloadURLJSON<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    case TwirpContentType.Protobuf:
+      return handleCacheServiceGetCacheEntryDownloadURLProtobuf<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    default:
+      const msg = "unexpected Content-Type";
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
+
+function handleCacheServiceDeleteCacheEntryRequest<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    DeleteCacheEntryRequest,
+    DeleteCacheEntryResponse
+  >[]
+): Promise<string | Uint8Array> {
+  switch (ctx.contentType) {
+    case TwirpContentType.JSON:
+      return handleCacheServiceDeleteCacheEntryJSON<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    case TwirpContentType.Protobuf:
+      return handleCacheServiceDeleteCacheEntryProtobuf<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    default:
+      const msg = "unexpected Content-Type";
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
+
+function handleCacheServiceListCacheEntriesRequest<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    ListCacheEntriesRequest,
+    ListCacheEntriesResponse
+  >[]
+): Promise<string | Uint8Array> {
+  switch (ctx.contentType) {
+    case TwirpContentType.JSON:
+      return handleCacheServiceListCacheEntriesJSON<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    case TwirpContentType.Protobuf:
+      return handleCacheServiceListCacheEntriesProtobuf<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    default:
+      const msg = "unexpected Content-Type";
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
+
+function handleCacheServiceLookupCacheEntryRequest<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    LookupCacheEntryRequest,
+    LookupCacheEntryResponse
+  >[]
+): Promise<string | Uint8Array> {
+  switch (ctx.contentType) {
+    case TwirpContentType.JSON:
+      return handleCacheServiceLookupCacheEntryJSON<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    case TwirpContentType.Protobuf:
+      return handleCacheServiceLookupCacheEntryProtobuf<T>(
+        ctx,
+        service,
+        data,
+        interceptors
+      );
+    default:
+      const msg = "unexpected Content-Type";
+      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
+  }
+}
+async function handleCacheServiceCreateCacheEntryJSON<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    CreateCacheEntryRequest,
+    CreateCacheEntryResponse
+  >[]
+) {
+  let request: CreateCacheEntryRequest;
+  let response: CreateCacheEntryResponse;
+
+  try {
+    const body = JSON.parse(data.toString() || "{}");
+    request = CreateCacheEntryRequest.fromJson(body, {
+      ignoreUnknownFields: true,
+    });
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the json request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      CreateCacheEntryRequest,
+      CreateCacheEntryResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.CreateCacheEntry(ctx, inputReq);
+    });
+  } else {
+    response = await service.CreateCacheEntry(ctx, request!);
+  }
+
+  return JSON.stringify(
+    CreateCacheEntryResponse.toJson(response, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    }) as string
+  );
+}
+
+async function handleCacheServiceFinalizeCacheEntryUploadJSON<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    FinalizeCacheEntryUploadRequest,
+    FinalizeCacheEntryUploadResponse
+  >[]
+) {
+  let request: FinalizeCacheEntryUploadRequest;
+  let response: FinalizeCacheEntryUploadResponse;
+
+  try {
+    const body = JSON.parse(data.toString() || "{}");
+    request = FinalizeCacheEntryUploadRequest.fromJson(body, {
+      ignoreUnknownFields: true,
+    });
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the json request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      FinalizeCacheEntryUploadRequest,
+      FinalizeCacheEntryUploadResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.FinalizeCacheEntryUpload(ctx, inputReq);
+    });
+  } else {
+    response = await service.FinalizeCacheEntryUpload(ctx, request!);
+  }
+
+  return JSON.stringify(
+    FinalizeCacheEntryUploadResponse.toJson(response, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    }) as string
+  );
+}
+
+async function handleCacheServiceGetCacheEntryDownloadURLJSON<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    GetCacheEntryDownloadURLRequest,
+    GetCacheEntryDownloadURLResponse
+  >[]
+) {
+  let request: GetCacheEntryDownloadURLRequest;
+  let response: GetCacheEntryDownloadURLResponse;
+
+  try {
+    const body = JSON.parse(data.toString() || "{}");
+    request = GetCacheEntryDownloadURLRequest.fromJson(body, {
+      ignoreUnknownFields: true,
+    });
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the json request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      GetCacheEntryDownloadURLRequest,
+      GetCacheEntryDownloadURLResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.GetCacheEntryDownloadURL(ctx, inputReq);
+    });
+  } else {
+    response = await service.GetCacheEntryDownloadURL(ctx, request!);
+  }
+
+  return JSON.stringify(
+    GetCacheEntryDownloadURLResponse.toJson(response, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    }) as string
+  );
+}
+
+async function handleCacheServiceDeleteCacheEntryJSON<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    DeleteCacheEntryRequest,
+    DeleteCacheEntryResponse
+  >[]
+) {
+  let request: DeleteCacheEntryRequest;
+  let response: DeleteCacheEntryResponse;
+
+  try {
+    const body = JSON.parse(data.toString() || "{}");
+    request = DeleteCacheEntryRequest.fromJson(body, {
+      ignoreUnknownFields: true,
+    });
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the json request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      DeleteCacheEntryRequest,
+      DeleteCacheEntryResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.DeleteCacheEntry(ctx, inputReq);
+    });
+  } else {
+    response = await service.DeleteCacheEntry(ctx, request!);
+  }
+
+  return JSON.stringify(
+    DeleteCacheEntryResponse.toJson(response, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    }) as string
+  );
+}
+
+async function handleCacheServiceListCacheEntriesJSON<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    ListCacheEntriesRequest,
+    ListCacheEntriesResponse
+  >[]
+) {
+  let request: ListCacheEntriesRequest;
+  let response: ListCacheEntriesResponse;
+
+  try {
+    const body = JSON.parse(data.toString() || "{}");
+    request = ListCacheEntriesRequest.fromJson(body, {
+      ignoreUnknownFields: true,
+    });
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the json request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      ListCacheEntriesRequest,
+      ListCacheEntriesResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.ListCacheEntries(ctx, inputReq);
+    });
+  } else {
+    response = await service.ListCacheEntries(ctx, request!);
+  }
+
+  return JSON.stringify(
+    ListCacheEntriesResponse.toJson(response, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    }) as string
+  );
+}
+
+async function handleCacheServiceLookupCacheEntryJSON<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    LookupCacheEntryRequest,
+    LookupCacheEntryResponse
+  >[]
+) {
+  let request: LookupCacheEntryRequest;
+  let response: LookupCacheEntryResponse;
+
+  try {
+    const body = JSON.parse(data.toString() || "{}");
+    request = LookupCacheEntryRequest.fromJson(body, {
+      ignoreUnknownFields: true,
+    });
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the json request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      LookupCacheEntryRequest,
+      LookupCacheEntryResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.LookupCacheEntry(ctx, inputReq);
+    });
+  } else {
+    response = await service.LookupCacheEntry(ctx, request!);
+  }
+
+  return JSON.stringify(
+    LookupCacheEntryResponse.toJson(response, {
+      useProtoFieldName: true,
+      emitDefaultValues: false,
+    }) as string
+  );
+}
+async function handleCacheServiceCreateCacheEntryProtobuf<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    CreateCacheEntryRequest,
+    CreateCacheEntryResponse
+  >[]
+) {
+  let request: CreateCacheEntryRequest;
+  let response: CreateCacheEntryResponse;
+
+  try {
+    request = CreateCacheEntryRequest.fromBinary(data);
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the protobuf request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      CreateCacheEntryRequest,
+      CreateCacheEntryResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.CreateCacheEntry(ctx, inputReq);
+    });
+  } else {
+    response = await service.CreateCacheEntry(ctx, request!);
+  }
+
+  return Buffer.from(CreateCacheEntryResponse.toBinary(response));
+}
+
+async function handleCacheServiceFinalizeCacheEntryUploadProtobuf<
+  T extends TwirpContext = TwirpContext
+>(
+  ctx: T,
+  service: CacheServiceTwirp,
+  data: Buffer,
+  interceptors?: Interceptor<
+    T,
+    FinalizeCacheEntryUploadRequest,
+    FinalizeCacheEntryUploadResponse
+  >[]
+) {
+  let request: FinalizeCacheEntryUploadRequest;
+  let response: FinalizeCacheEntryUploadResponse;
+
+  try {
+    request = FinalizeCacheEntryUploadRequest.fromBinary(data);
+  } catch (e) {
+    if (e instanceof Error) {
+      const msg = "the protobuf request could not be decoded";
+      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
+    }
+  }
+
+  if (interceptors && interceptors.length > 0) {
+    const interceptor = chainInterceptors(...interceptors) as Interceptor<
+      T,
+      FinalizeCacheEntryUploadRequest,
+      FinalizeCacheEntryUploadResponse
+    >;
+    response = await interceptor(ctx, request!, (ctx, inputReq) => {
+      return service.FinalizeCacheEntryUpload(ctx, inputReq);
+    });
+  } else {
+    response = await service.FinalizeCacheEntryUpload(ctx, request!);
+  }
+
+  return Buffer.from(FinalizeCacheEntryUploadResponse.toBinary(response));
+}
+) { + let request: GetCacheEntryDownloadURLRequest; + let response: GetCacheEntryDownloadURLResponse; + + try { + request = GetCacheEntryDownloadURLRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + GetCacheEntryDownloadURLRequest, + GetCacheEntryDownloadURLResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx, inputReq); + }); + } else { + response = await service.GetCacheEntryDownloadURL(ctx, request!); + } + + return Buffer.from(GetCacheEntryDownloadURLResponse.toBinary(response)); +} + +async function handleCacheServiceDeleteCacheEntryProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >[] +) { + let request: DeleteCacheEntryRequest; + let response: DeleteCacheEntryResponse; + + try { + request = DeleteCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + DeleteCacheEntryRequest, + DeleteCacheEntryResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.DeleteCacheEntry(ctx, inputReq); + }); + } else { + response = await service.DeleteCacheEntry(ctx, request!); + } + + return Buffer.from(DeleteCacheEntryResponse.toBinary(response)); +} + +async function handleCacheServiceListCacheEntriesProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >[] +) { + let request: ListCacheEntriesRequest; + let response: ListCacheEntriesResponse; + + try { + request = ListCacheEntriesRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + + if (interceptors && interceptors.length > 0) { + const interceptor = chainInterceptors(...interceptors) as Interceptor< + T, + ListCacheEntriesRequest, + ListCacheEntriesResponse + >; + response = await interceptor(ctx, request!, (ctx, inputReq) => { + return service.ListCacheEntries(ctx, inputReq); + }); + } else { + response = await service.ListCacheEntries(ctx, request!); + } + + return Buffer.from(ListCacheEntriesResponse.toBinary(response)); +} + +async function handleCacheServiceLookupCacheEntryProtobuf< + T extends TwirpContext = TwirpContext +>( + ctx: T, + service: CacheServiceTwirp, + data: Buffer, + interceptors?: Interceptor< + T, + LookupCacheEntryRequest, + LookupCacheEntryResponse + >[] +) { + let request: LookupCacheEntryRequest; + let response: LookupCacheEntryResponse; + + try { + request = LookupCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + 
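For reference, every generated protobuf handler above follows the same contract: decode the body with fromBinary, dispatch to the service implementation (optionally through chained interceptors), and re-encode the result with toBinary. A minimal sketch of driving one handler directly, e.g. from a unit test; the service stub, the ctx value, and all field values are hypothetical, and only the round-trip mirrors the generated code:

// Hypothetical CacheServiceTwirp stub; only CreateCacheEntry is implemented.
const service = {
  async CreateCacheEntry(_ctx: TwirpContext, _req: CreateCacheEntryRequest) {
    return CreateCacheEntryResponse.create({
      ok: true, // assumed response field
      signedUploadUrl: 'https://example.test/upload' // hypothetical URL
    })
  }
} as unknown as CacheServiceTwirp

// Encode a request the same way a Twirp client would (values are made up).
const data = Buffer.from(
  CreateCacheEntryRequest.toBinary(
    CreateCacheEntryRequest.create({ key: 'node-deps', version: 'abc123' })
  )
)

// ctx is assumed to be a TwirpContext supplied by the server runtime.
const raw = await handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data)
const response = CreateCacheEntryResponse.fromBinary(raw)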
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index c50ccd4b..8fe76376 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -1,16 +1,13 @@
 import * as core from '@actions/core'
-import {HttpClient} from '@actions/http-client'
-import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
+import { HttpClient } from '@actions/http-client'
+import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
 import {
   RequestOptions,
   TypedResponse
 } from '@actions/http-client/lib/interfaces'
-import * as crypto from 'crypto'
 import * as fs from 'fs'
-import {URL} from 'url'
-
+import { URL } from 'url'
 import * as utils from './cacheUtils'
-import {CompressionMethod} from './constants'
 import {
   ArtifactCacheEntry,
   InternalCacheOptions,
@@ -36,9 +33,7 @@ import {
   retryHttpClientResponse,
   retryTypedResponse
 } from './requestUtils'
-import {CacheUrl} from './constants'
-
-const versionSalt = '1.0'
+import { CacheUrl } from './constants'
 
 function getCacheApiUrl(resource: string): string {
   const baseUrl: string = CacheUrl || ''
@@ -76,43 +71,18 @@ function createHttpClient(): HttpClient {
   )
 }
 
-export function getCacheVersion(
-  paths: string[],
-  compressionMethod?: CompressionMethod,
-  enableCrossOsArchive = false
-): string {
-  // don't pass changes upstream
-  const components = paths.slice()
-
-  // Add compression method to cache version to restore
-  // compressed cache as per compression method
-  if (compressionMethod) {
-    components.push(compressionMethod)
-  }
-
-  // Only check for windows platforms if enableCrossOsArchive is false
-  if (process.platform === 'win32' && !enableCrossOsArchive) {
-    components.push('windows-only')
-  }
-
-  // Add salt to cache version to support breaking changes in cache entry
-  components.push(versionSalt)
-
-  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
-}
-
 export async function getCacheEntry(
   keys: string[],
   paths: string[],
   options?: InternalCacheOptions
 ): Promise<ArtifactCacheEntry | null> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(
+  const version = utils.getCacheVersion(
     paths,
     options?.compressionMethod,
     options?.enableCrossOsArchive
   )
-
+
   const resource = `cache?keys=${encodeURIComponent(
     keys.join(',')
   )}&version=${version}`
@@ -209,7 +179,7 @@ export async function reserveCache(
   options?: InternalCacheOptions
 ): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(
+  const version = utils.getCacheVersion(
     paths,
     options?.compressionMethod,
     options?.enableCrossOsArchive
@@ -246,8 +216,7 @@ async function uploadChunk(
   end: number
 ): Promise<void> {
   core.debug(
-    `Uploading chunk of size ${
-      end - start + 1
+    `Uploading chunk of size ${end - start + 1
     } bytes at offset ${start} with content range: ${getContentRange(
       start,
       end
@@ -343,7 +312,7 @@ async function commitCache(
   cacheId: number,
   filesize: number
 ): Promise<TypedResponse<null>> {
-  const commitCacheRequest: CommitCacheRequest = {size: filesize}
+  const commitCacheRequest: CommitCacheRequest = { size: filesize }
   return await retryTypedResponse('commitCache', async () =>
     httpClient.postJson<null>(
      getCacheApiUrl(`caches/${cacheId.toString()}`),
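As a concrete illustration of the lookup above, getCacheEntry's resource string is just the comma-joined keys plus the version hash; with hypothetical values it expands like this:

// Sketch only: the keys and version below are made up.
const keys = ['npm-linux-4a1c', 'npm-linux-']
const version = 'b1946ac92492d2347c6235b4d2611184'
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`
// => 'cache?keys=npm-linux-4a1c%2Cnpm-linux-&version=b1946ac92492d2347c6235b4d2611184'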
diff --git a/packages/cache/src/internal/cacheTwirpClient.ts b/packages/cache/src/internal/cacheTwirpClient.ts
index 62f98426..6d9826ac 100644
--- a/packages/cache/src/internal/cacheTwirpClient.ts
+++ b/packages/cache/src/internal/cacheTwirpClient.ts
@@ -1,197 +1,196 @@
-import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
-import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
-import {info, debug} from '@actions/core'
-import {BlobCacheServiceClientJSON} from '../generated/results/api/v1/blobcache.twirp'
-import {CacheUrl} from './constants'
-import {getRuntimeToken} from './config'
+import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client'
+import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
+import { info, debug } from '@actions/core'
+import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp'
+import { CacheUrl } from './constants'
+import { getRuntimeToken } from './config'
 // import {getUserAgentString} from './user-agent'
 // import {NetworkError, UsageError} from './errors'
 
 // The twirp http client must implement this interface
 interface Rpc {
-  request(
-    service: string,
-    method: string,
-    contentType: 'application/json' | 'application/protobuf',
-    data: object | Uint8Array
-  ): Promise<object>
+    request(
+        service: string,
+        method: string,
+        contentType: 'application/json' | 'application/protobuf',
+        data: object | Uint8Array
+    ): Promise<object>
 }
 
-class BlobCacheServiceClient implements Rpc {
-  private httpClient: HttpClient
-  private baseUrl: string
-  private maxAttempts = 5
-  private baseRetryIntervalMilliseconds = 3000
-  private retryMultiplier = 1.5
+class CacheServiceClient implements Rpc {
+    private httpClient: HttpClient
+    private baseUrl: string
+    private maxAttempts = 5
+    private baseRetryIntervalMilliseconds = 3000
+    private retryMultiplier = 1.5
 
-  constructor(
-    userAgent: string,
-    maxAttempts?: number,
-    baseRetryIntervalMilliseconds?: number,
+    constructor(
+        userAgent: string,
+        maxAttempts?: number,
+        baseRetryIntervalMilliseconds?: number,
+        retryMultiplier?: number
+    ) {
+        const token = getRuntimeToken()
+        this.baseUrl = CacheUrl
+        if (maxAttempts) {
+            this.maxAttempts = maxAttempts
+        }
+        if (baseRetryIntervalMilliseconds) {
+            this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
+        }
+        if (retryMultiplier) {
+            this.retryMultiplier = retryMultiplier
+        }
+
+        this.httpClient = new HttpClient(userAgent, [
+            new BearerCredentialHandler(token)
+        ])
+    }
+
+    // This function satisfies the Rpc interface. It is compatible with the
+    // generated JSON client.
+    async request(
+        service: string,
+        method: string,
+        contentType: 'application/json' | 'application/protobuf',
+        data: object | Uint8Array
+    ): Promise<object> {
+        const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
+        debug(`[Request] ${method} ${url}`)
+        const headers = {
+            'Content-Type': contentType
+        }
+        try {
+            const { body } = await this.retryableRequest(async () =>
+                this.httpClient.post(url, JSON.stringify(data), headers)
+            )
+
+            return body
+        } catch (error) {
+            throw new Error(`Failed to ${method}: ${error.message}`)
+        }
+    }
+
+    async retryableRequest(
+        operation: () => Promise<HttpClientResponse>
+    ): Promise<{ response: HttpClientResponse; body: object }> {
+        let attempt = 0
+        let errorMessage = ''
+        let rawBody = ''
+        while (attempt < this.maxAttempts) {
+            let isRetryable = false
+
+            try {
+                const response = await operation()
+                const statusCode = response.message.statusCode
+                rawBody = await response.readBody()
+                debug(`[Response] - ${response.message.statusCode}`)
+                debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
+                const body = JSON.parse(rawBody)
+                debug(`Body: ${JSON.stringify(body, null, 2)}`)
+                if (this.isSuccessStatusCode(statusCode)) {
+                    return { response, body }
+                }
+                isRetryable = this.isRetryableHttpStatusCode(statusCode)
+                errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
+                if (body.msg) {
+                    // if (UsageError.isUsageErrorMessage(body.msg)) {
+                    //   throw new UsageError()
+                    // }
+
+                    errorMessage = `${errorMessage}: ${body.msg}`
+                }
+            } catch (error) {
+                if (error instanceof SyntaxError) {
+                    debug(`Raw Body: ${rawBody}`)
+                }
+
+                // if (error instanceof UsageError) {
+                //   throw error
+                // }
+
+                // if (NetworkError.isNetworkErrorCode(error?.code)) {
+                //   throw new NetworkError(error?.code)
+                // }
+
+                isRetryable = true
+                errorMessage = error.message
+            }
+
+            if (!isRetryable) {
+                throw new Error(`Received non-retryable error: ${errorMessage}`)
+            }
+
+            if (attempt + 1 === this.maxAttempts) {
+                throw new Error(
+                    `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
+                )
+            }
+
+            const retryTimeMilliseconds =
+                this.getExponentialRetryTimeMilliseconds(attempt)
+            info(
+                `Attempt ${attempt + 1} of ${this.maxAttempts
+                } failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
+            )
+            await this.sleep(retryTimeMilliseconds)
+            attempt++
+        }
+
+        throw new Error(`Request failed`)
+    }
+
+    isSuccessStatusCode(statusCode?: number): boolean {
+        if (!statusCode) return false
+        return statusCode >= 200 && statusCode < 300
+    }
+
+    isRetryableHttpStatusCode(statusCode?: number): boolean {
+        if (!statusCode) return false
+
+        const retryableStatusCodes = [
+            HttpCodes.BadGateway,
+            HttpCodes.GatewayTimeout,
+            HttpCodes.InternalServerError,
+            HttpCodes.ServiceUnavailable,
+            HttpCodes.TooManyRequests
+        ]
+
+        return retryableStatusCodes.includes(statusCode)
+    }
+
+    async sleep(milliseconds: number): Promise<void> {
+        return new Promise(resolve => setTimeout(resolve, milliseconds))
+    }
+
+    getExponentialRetryTimeMilliseconds(attempt: number): number {
+        if (attempt < 0) {
+            throw new Error('attempt should be a non-negative integer')
+        }
+
+        if (attempt === 0) {
+            return this.baseRetryIntervalMilliseconds
+        }
+
+        const minTime =
+            this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
+        const maxTime = minTime * this.retryMultiplier
+
+        // returns a random number between minTime and maxTime (exclusive)
+        return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
+    }
+}
+
+export function internalCacheTwirpClient(options?: {
+    maxAttempts?: number
+    retryIntervalMs?: number
    retryMultiplier?: number
-  ) {
-    const token = getRuntimeToken()
-    this.baseUrl = CacheUrl
-    if (maxAttempts) {
-      this.maxAttempts = maxAttempts
-    }
-    if (baseRetryIntervalMilliseconds) {
-      this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
-    }
-    if (retryMultiplier) {
-      this.retryMultiplier = retryMultiplier
-    }
-
-    this.httpClient = new HttpClient(userAgent, [
-      new BearerCredentialHandler(token)
-    ])
-  }
-
-  // This function satisfies the Rpc interface. It is compatible with the JSON
-  // JSON generated client.
-  async request(
-    service: string,
-    method: string,
-    contentType: 'application/json' | 'application/protobuf',
-    data: object | Uint8Array
-  ): Promise<object> {
-    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
-    debug(`[Request] ${method} ${url}`)
-    const headers = {
-      'Content-Type': contentType
-    }
-    try {
-      const {body} = await this.retryableRequest(async () =>
-        this.httpClient.post(url, JSON.stringify(data), headers)
-      )
-
-      return body
-    } catch (error) {
-      throw new Error(`Failed to ${method}: ${error.message}`)
-    }
-  }
-
-  async retryableRequest(
-    operation: () => Promise<HttpClientResponse>
-  ): Promise<{response: HttpClientResponse; body: object}> {
-    let attempt = 0
-    let errorMessage = ''
-    let rawBody = ''
-    while (attempt < this.maxAttempts) {
-      let isRetryable = false
-
-      try {
-        const response = await operation()
-        const statusCode = response.message.statusCode
-        rawBody = await response.readBody()
-        debug(`[Response] - ${response.message.statusCode}`)
-        debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
-        const body = JSON.parse(rawBody)
-        debug(`Body: ${JSON.stringify(body, null, 2)}`)
-        if (this.isSuccessStatusCode(statusCode)) {
-          return {response, body}
-        }
-        isRetryable = this.isRetryableHttpStatusCode(statusCode)
-        errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
-        if (body.msg) {
-          // if (UsageError.isUsageErrorMessage(body.msg)) {
-          //   throw new UsageError()
-          // }
-
-          errorMessage = `${errorMessage}: ${body.msg}`
-        }
-      } catch (error) {
-        if (error instanceof SyntaxError) {
-          debug(`Raw Body: ${rawBody}`)
-        }
-
-        // if (error instanceof UsageError) {
-        //   throw error
-        // }
-
-        // if (NetworkError.isNetworkErrorCode(error?.code)) {
-        //   throw new NetworkError(error?.code)
-        // }
-
-        isRetryable = true
-        errorMessage = error.message
-      }
-
-      if (!isRetryable) {
-        throw new Error(`Received non-retryable error: ${errorMessage}`)
-      }
-
-      if (attempt + 1 === this.maxAttempts) {
-        throw new Error(
-          `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
-        )
-      }
-
-      const retryTimeMilliseconds =
-        this.getExponentialRetryTimeMilliseconds(attempt)
-      info(
-        `Attempt ${attempt + 1} of ${
-          this.maxAttempts
-        } failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
-      )
-      await this.sleep(retryTimeMilliseconds)
-      attempt++
-    }
-
-    throw new Error(`Request failed`)
-  }
-
-  isSuccessStatusCode(statusCode?: number): boolean {
-    if (!statusCode) return false
-    return statusCode >= 200 && statusCode < 300
-  }
-
-  isRetryableHttpStatusCode(statusCode?: number): boolean {
-    if (!statusCode) return false
-
-    const retryableStatusCodes = [
-      HttpCodes.BadGateway,
-      HttpCodes.GatewayTimeout,
-      HttpCodes.InternalServerError,
-      HttpCodes.ServiceUnavailable,
-      HttpCodes.TooManyRequests
-    ]
-
-    return retryableStatusCodes.includes(statusCode)
-  }
-
-  async sleep(milliseconds: number): Promise<void> {
-    return new Promise(resolve => setTimeout(resolve, milliseconds))
-  }
-
-  getExponentialRetryTimeMilliseconds(attempt: number): number {
-    if (attempt < 0) {
-      throw new Error('attempt should be a positive integer')
-    }
-
-    if (attempt === 0) {
-      return this.baseRetryIntervalMilliseconds
-    }
-
-    const minTime =
-      this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
-    const maxTime = minTime * this.retryMultiplier
-
-    // returns a random number between minTime and maxTime (exclusive)
-    return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
-  }
-}
-
-export function internalBlobCacheTwirpClient(options?: {
-  maxAttempts?: number
-  retryIntervalMs?: number
-  retryMultiplier?: number
-}): BlobCacheServiceClientJSON {
-  const client = new BlobCacheServiceClient(
-    'actions/cache',
-    options?.maxAttempts,
-    options?.retryIntervalMs,
-    options?.retryMultiplier
-  )
-  return new BlobCacheServiceClientJSON(client)
+}): CacheServiceClientJSON {
+    const client = new CacheServiceClient(
+        'actions/cache',
+        options?.maxAttempts,
+        options?.retryIntervalMs,
+        options?.retryMultiplier
+    )
+    return new CacheServiceClientJSON(client)
 }
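With the class defaults above (baseRetryIntervalMilliseconds = 3000, retryMultiplier = 1.5), getExponentialRetryTimeMilliseconds returns the base interval for attempt 0 and a random value from a growing window afterwards. A worked example of the same arithmetic:

// attempt 0 -> exactly 3000 ms (special-cased in the method above)
// attempt 1 -> random in [3000 * 1.5^1, 3000 * 1.5^2) = [4500, 6750) ms
// attempt 2 -> random in [3000 * 1.5^2, 3000 * 1.5^3) = [6750, 10125) ms
const base = 3000
const multiplier = 1.5
function backoffWindow(attempt: number): [number, number] {
  const minTime = base * multiplier ** attempt
  return [minTime, minTime * multiplier]
}
console.log(backoffWindow(2)) // [ 6750, 10125 ]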
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
index 91bae9a8..48a0b354 100644
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -6,13 +6,16 @@ import * as fs from 'fs'
 import * as path from 'path'
 import * as semver from 'semver'
 import * as util from 'util'
-import {v4 as uuidV4} from 'uuid'
+import { v4 as uuidV4 } from 'uuid'
+import * as crypto from 'crypto'
 import {
   CacheFilename,
   CompressionMethod,
   GnuTarPathOnWindows
 } from './constants'
 
+const versionSalt = '1.0'
+
 // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
 export async function createTempDirectory(): Promise<string> {
   const IS_WINDOWS = process.platform === 'win32'
@@ -143,3 +146,28 @@ export function isGhes(): boolean {
 
   return !isGitHubHost && !isGheHost
 }
+
+export function getCacheVersion(
+  paths: string[],
+  compressionMethod?: CompressionMethod,
+  enableCrossOsArchive = false
+): string {
+  // don't pass changes upstream
+  const components = paths.slice()
+
+  // Add compression method to cache version to restore
+  // compressed cache as per compression method
+  if (compressionMethod) {
+    components.push(compressionMethod)
+  }
+
+  // Only check for windows platforms if enableCrossOsArchive is false
+  if (process.platform === 'win32' && !enableCrossOsArchive) {
+    components.push('windows-only')
+  }
+
+  // Add salt to cache version to support breaking changes in cache entry
+  components.push(versionSalt)
+
+  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
+}
\ No newline at end of file
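Since getCacheVersion is a plain sha256 over the pipe-joined components, the same inputs always map to the same version. A small sketch of what it computes; the path and the compression-method string are hypothetical:

import * as crypto from 'crypto'

// paths + compressionMethod + versionSalt, joined with '|'
const components = ['node_modules', 'zstd-without-long', '1.0']
const version = crypto
  .createHash('sha256')
  .update(components.join('|'))
  .digest('hex')
// version is a stable 64-character hex digest for these inputs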
diff --git a/packages/cache/src/internal/v2/upload-cache.ts b/packages/cache/src/internal/v2/upload-cache.ts
index 574cf788..b3ed530d 100644
--- a/packages/cache/src/internal/v2/upload-cache.ts
+++ b/packages/cache/src/internal/v2/upload-cache.ts
@@ -1,13 +1,14 @@
 import * as core from '@actions/core'
-import {GetCacheBlobUploadURLResponse} from '../../generated/results/api/v1/blobcache'
-import {ZipUploadStream} from '@actions/artifact/lib/internal/upload/zip'
-import {NetworkError} from '@actions/artifact/'
-import {TransferProgressEvent} from '@azure/core-http'
+import { CreateCacheEntryResponse } from '../../generated/results/api/v1/cache'
+import { ZipUploadStream } from '@actions/artifact/lib/internal/upload/zip'
+import { NetworkError } from '@actions/artifact/'
+import { TransferProgressEvent } from '@azure/core-http'
 import * as stream from 'stream'
 import * as crypto from 'crypto'
+
 import {
-  BlobClient,
-  BlockBlobClient,
+  BlobClient,
+  BlockBlobClient,
   BlockBlobUploadStreamOptions,
   BlockBlobParallelUploadOptions
 } from '@azure/storage-blob'
@@ -55,7 +56,7 @@ export async function UploadCacheStream(
   }
 
   const options: BlockBlobUploadStreamOptions = {
-    blobHTTPHeaders: {blobContentType: 'zip'},
+    blobHTTPHeaders: { blobContentType: 'zip' },
     onProgress: uploadCallback
   }
 
@@ -89,7 +90,7 @@ export async function UploadCacheStream(
   }
 
   core.info('Finished uploading cache content to blob storage!')
-
+
   hashStream.end()
   sha256Hash = hashStream.read() as string
   core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`)
@@ -107,11 +108,11 @@ export async function UploadCacheStream(
 }
 
 export async function UploadCacheFile(
-  uploadURL: GetCacheBlobUploadURLResponse,
+  uploadURL: CreateCacheEntryResponse,
   archivePath: string,
 ): Promise<{}> {
   core.info(`Uploading ${archivePath} to: ${JSON.stringify(uploadURL)}`)
-
+
   // Specify data transfer options
   const uploadOptions: BlockBlobParallelUploadOptions = {
     blockSize: 4 * 1024 * 1024, // 4 MiB max block size
@@ -119,8 +120,7 @@
     maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size
   };
 
-  // const blobClient: BlobClient = new BlobClient(uploadURL.urls[0])
-  const blobClient: BlobClient = new BlobClient(uploadURL.urls[0].url)
+  const blobClient: BlobClient = new BlobClient(uploadURL.signedUploadUrl)
   const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
 
   core.info(`BlobClient: ${JSON.stringify(blobClient)}`)
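UploadCacheFile now consumes the CreateCacheEntryResponse directly and uploads the archive to its signed URL. A minimal sketch of the v2 save path calling it; the response values and archive path are hypothetical, and only the call shape follows the signature above:

import { UploadCacheFile } from './internal/v2/upload-cache'
import { CreateCacheEntryResponse } from './generated/results/api/v1/cache'

// In practice this comes back from twirpClient.CreateCacheEntry(request);
// the URL below is a made-up Azure Blob SAS URL.
const response = CreateCacheEntryResponse.create({
  ok: true, // assumed field
  signedUploadUrl: 'https://example.blob.core.windows.net/cache/entry?sig=elided'
})

// archivePath would normally be the tarball produced by createTar.
await UploadCacheFile(response, '/tmp/cache.tzst')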