mirror of https://github.com/actions/toolkit
Add twirp client
parent
264230c2c5
commit
c8466d1fac
|
@ -1,9 +1,12 @@
|
||||||
import * as core from '@actions/core'
|
import * as core from '@actions/core'
|
||||||
import * as path from 'path'
|
import * as path from 'path'
|
||||||
import * as utils from './internal/cacheUtils'
|
import * as utils from './internal/cacheUtils'
|
||||||
|
import {CacheUrl} from './internal/constants'
|
||||||
import * as cacheHttpClient from './internal/cacheHttpClient'
|
import * as cacheHttpClient from './internal/cacheHttpClient'
|
||||||
|
import * as cacheTwirpClient from './internal/cacheTwirpClient'
|
||||||
import {createTar, extractTar, listTar} from './internal/tar'
|
import {createTar, extractTar, listTar} from './internal/tar'
|
||||||
import {DownloadOptions, UploadOptions} from './options'
|
import {DownloadOptions, UploadOptions} from './options'
|
||||||
|
import {GetCachedBlobRequest} from './generated/results/api/v1/blobcache'
|
||||||
|
|
||||||
export class ValidationError extends Error {
|
export class ValidationError extends Error {
|
||||||
constructor(message: string) {
|
constructor(message: string) {
|
||||||
|
@ -50,7 +53,7 @@ function checkKey(key: string): void {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
export function isFeatureAvailable(): boolean {
|
export function isFeatureAvailable(): boolean {
|
||||||
return !!process.env['ACTIONS_CACHE_URL']
|
return !!CacheUrl
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -171,6 +174,16 @@ export async function saveCache(
|
||||||
checkPaths(paths)
|
checkPaths(paths)
|
||||||
checkKey(key)
|
checkKey(key)
|
||||||
|
|
||||||
|
// TODO: REMOVE ME
|
||||||
|
// Making a call to the service
|
||||||
|
const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient()
|
||||||
|
const getBlobRequest: GetCachedBlobRequest = {
|
||||||
|
owner: "link-/test",
|
||||||
|
keys: ['test-123412631236126'],
|
||||||
|
}
|
||||||
|
const getBlobResponse = await twirpClient.GetCachedBlob(getBlobRequest)
|
||||||
|
core.info(`GetCachedBlobResponse: ${JSON.stringify(getBlobResponse)}`)
|
||||||
|
|
||||||
const compressionMethod = await utils.getCompressionMethod()
|
const compressionMethod = await utils.getCompressionMethod()
|
||||||
let cacheId = -1
|
let cacheId = -1
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,290 @@
|
||||||
|
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||||
|
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
|
||||||
|
// tslint:disable
|
||||||
|
//
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
//
|
||||||
|
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||||
|
import { WireType } from "@protobuf-ts/runtime";
|
||||||
|
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||||
|
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||||
|
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||||
|
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||||
|
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||||
|
import { typeofJsonValue } from "@protobuf-ts/runtime";
|
||||||
|
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||||
|
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||||
|
import { PbLong } from "@protobuf-ts/runtime";
|
||||||
|
import { MessageType } from "@protobuf-ts/runtime";
|
||||||
|
/**
|
||||||
|
* A Timestamp represents a point in time independent of any time zone or local
|
||||||
|
* calendar, encoded as a count of seconds and fractions of seconds at
|
||||||
|
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||||
|
* January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||||
|
* Gregorian calendar backwards to year one.
|
||||||
|
*
|
||||||
|
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||||
|
* second table is needed for interpretation, using a [24-hour linear
|
||||||
|
* smear](https://developers.google.com/time/smear).
|
||||||
|
*
|
||||||
|
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||||
|
* restricting to that range, we ensure that we can convert to and from [RFC
|
||||||
|
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||||
|
*
|
||||||
|
* # Examples
|
||||||
|
*
|
||||||
|
* Example 1: Compute Timestamp from POSIX `time()`.
|
||||||
|
*
|
||||||
|
* Timestamp timestamp;
|
||||||
|
* timestamp.set_seconds(time(NULL));
|
||||||
|
* timestamp.set_nanos(0);
|
||||||
|
*
|
||||||
|
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||||
|
*
|
||||||
|
* struct timeval tv;
|
||||||
|
* gettimeofday(&tv, NULL);
|
||||||
|
*
|
||||||
|
* Timestamp timestamp;
|
||||||
|
* timestamp.set_seconds(tv.tv_sec);
|
||||||
|
* timestamp.set_nanos(tv.tv_usec * 1000);
|
||||||
|
*
|
||||||
|
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||||
|
*
|
||||||
|
* FILETIME ft;
|
||||||
|
* GetSystemTimeAsFileTime(&ft);
|
||||||
|
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||||
|
*
|
||||||
|
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||||
|
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||||
|
* Timestamp timestamp;
|
||||||
|
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||||
|
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||||
|
*
|
||||||
|
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||||
|
*
|
||||||
|
* long millis = System.currentTimeMillis();
|
||||||
|
*
|
||||||
|
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||||
|
* .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* Example 5: Compute Timestamp from Java `Instant.now()`.
|
||||||
|
*
|
||||||
|
* Instant now = Instant.now();
|
||||||
|
*
|
||||||
|
* Timestamp timestamp =
|
||||||
|
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
|
||||||
|
* .setNanos(now.getNano()).build();
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* Example 6: Compute Timestamp from current time in Python.
|
||||||
|
*
|
||||||
|
* timestamp = Timestamp()
|
||||||
|
* timestamp.GetCurrentTime()
|
||||||
|
*
|
||||||
|
* # JSON Mapping
|
||||||
|
*
|
||||||
|
* In JSON format, the Timestamp type is encoded as a string in the
|
||||||
|
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||||
|
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||||
|
* where {year} is always expressed using four digits while {month}, {day},
|
||||||
|
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||||
|
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||||
|
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||||
|
* is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||||
|
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||||
|
* able to accept both UTC and other timezones (as indicated by an offset).
|
||||||
|
*
|
||||||
|
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||||
|
* 01:30 UTC on January 15, 2017.
|
||||||
|
*
|
||||||
|
* In JavaScript, one can convert a Date object to this format using the
|
||||||
|
* standard
|
||||||
|
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||||
|
* method. In Python, a standard `datetime.datetime` object can be converted
|
||||||
|
* to this format using
|
||||||
|
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||||
|
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||||
|
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||||
|
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
|
||||||
|
* ) to obtain a formatter capable of generating timestamps in this format.
|
||||||
|
*
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.Timestamp
|
||||||
|
*/
|
||||||
|
export interface Timestamp {
|
||||||
|
/**
|
||||||
|
* Represents seconds of UTC time since Unix epoch
|
||||||
|
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||||
|
* 9999-12-31T23:59:59Z inclusive.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: int64 seconds = 1;
|
||||||
|
*/
|
||||||
|
seconds: string;
|
||||||
|
/**
|
||||||
|
* Non-negative fractions of a second at nanosecond resolution. Negative
|
||||||
|
* second values with fractions must still have non-negative nanos values
|
||||||
|
* that count forward in time. Must be from 0 to 999,999,999
|
||||||
|
* inclusive.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: int32 nanos = 2;
|
||||||
|
*/
|
||||||
|
nanos: number;
|
||||||
|
}
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class Timestamp$Type extends MessageType<Timestamp> {
|
||||||
|
constructor() {
|
||||||
|
super("google.protobuf.Timestamp", [
|
||||||
|
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||||
|
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Creates a new `Timestamp` for the current time.
|
||||||
|
*/
|
||||||
|
now(): Timestamp {
|
||||||
|
const msg = this.create();
|
||||||
|
const ms = Date.now();
|
||||||
|
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
|
||||||
|
msg.nanos = (ms % 1000) * 1000000;
|
||||||
|
return msg;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Converts a `Timestamp` to a JavaScript Date.
|
||||||
|
*/
|
||||||
|
toDate(message: Timestamp): Date {
|
||||||
|
return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Converts a JavaScript Date to a `Timestamp`.
|
||||||
|
*/
|
||||||
|
fromDate(date: Date): Timestamp {
|
||||||
|
const msg = this.create();
|
||||||
|
const ms = date.getTime();
|
||||||
|
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
|
||||||
|
msg.nanos = (ms % 1000) * 1000000;
|
||||||
|
return msg;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* In JSON format, the `Timestamp` type is encoded as a string
|
||||||
|
* in the RFC 3339 format.
|
||||||
|
*/
|
||||||
|
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue {
|
||||||
|
let ms = PbLong.from(message.seconds).toNumber() * 1000;
|
||||||
|
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||||
|
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||||
|
if (message.nanos < 0)
|
||||||
|
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
|
||||||
|
let z = "Z";
|
||||||
|
if (message.nanos > 0) {
|
||||||
|
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
|
||||||
|
if (nanosStr.substring(3) === "000000")
|
||||||
|
z = "." + nanosStr.substring(0, 3) + "Z";
|
||||||
|
else if (nanosStr.substring(6) === "000")
|
||||||
|
z = "." + nanosStr.substring(0, 6) + "Z";
|
||||||
|
else
|
||||||
|
z = "." + nanosStr + "Z";
|
||||||
|
}
|
||||||
|
return new Date(ms).toISOString().replace(".000Z", z);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* In JSON format, the `Timestamp` type is encoded as a string
|
||||||
|
* in the RFC 3339 format.
|
||||||
|
*/
|
||||||
|
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp {
|
||||||
|
if (typeof json !== "string")
|
||||||
|
throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + ".");
|
||||||
|
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
|
||||||
|
if (!matches)
|
||||||
|
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
|
||||||
|
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
|
||||||
|
if (Number.isNaN(ms))
|
||||||
|
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
|
||||||
|
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||||
|
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||||
|
if (!target)
|
||||||
|
target = this.create();
|
||||||
|
target.seconds = PbLong.from(ms / 1000).toString();
|
||||||
|
target.nanos = 0;
|
||||||
|
if (matches[7])
|
||||||
|
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
create(value?: PartialMessage<Timestamp>): Timestamp {
|
||||||
|
const message = { seconds: "0", nanos: 0 };
|
||||||
|
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
reflectionMergePartial<Timestamp>(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp {
|
||||||
|
let message = target ?? this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* int64 seconds */ 1:
|
||||||
|
message.seconds = reader.int64().toString();
|
||||||
|
break;
|
||||||
|
case /* int32 nanos */ 2:
|
||||||
|
message.nanos = reader.int32();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||||
|
/* int64 seconds = 1; */
|
||||||
|
if (message.seconds !== "0")
|
||||||
|
writer.tag(1, WireType.Varint).int64(message.seconds);
|
||||||
|
/* int32 nanos = 2; */
|
||||||
|
if (message.nanos !== 0)
|
||||||
|
writer.tag(2, WireType.Varint).int32(message.nanos);
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message google.protobuf.Timestamp
|
||||||
|
*/
|
||||||
|
export const Timestamp = new Timestamp$Type();
|
|
@ -0,0 +1,753 @@
|
||||||
|
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||||
|
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
|
||||||
|
// tslint:disable
|
||||||
|
//
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// Wrappers for primitive (non-message) types. These types are useful
|
||||||
|
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||||
|
// where we need to distinguish between the absence of a primitive
|
||||||
|
// typed field and its default value.
|
||||||
|
//
|
||||||
|
// These wrappers have no meaningful use within repeated fields as they lack
|
||||||
|
// the ability to detect presence on individual elements.
|
||||||
|
// These wrappers have no meaningful use within a map or a oneof since
|
||||||
|
// individual entries of a map or fields of a oneof can already detect presence.
|
||||||
|
//
|
||||||
|
import { ScalarType } from "@protobuf-ts/runtime";
|
||||||
|
import { LongType } from "@protobuf-ts/runtime";
|
||||||
|
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||||
|
import { WireType } from "@protobuf-ts/runtime";
|
||||||
|
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||||
|
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||||
|
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||||
|
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||||
|
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||||
|
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||||
|
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||||
|
import { MessageType } from "@protobuf-ts/runtime";
|
||||||
|
/**
|
||||||
|
* Wrapper message for `double`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `DoubleValue` is JSON number.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.DoubleValue
|
||||||
|
*/
|
||||||
|
export interface DoubleValue {
|
||||||
|
/**
|
||||||
|
* The double value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: double value = 1;
|
||||||
|
*/
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `float`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `FloatValue` is JSON number.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.FloatValue
|
||||||
|
*/
|
||||||
|
export interface FloatValue {
|
||||||
|
/**
|
||||||
|
* The float value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: float value = 1;
|
||||||
|
*/
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `int64`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `Int64Value` is JSON string.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.Int64Value
|
||||||
|
*/
|
||||||
|
export interface Int64Value {
|
||||||
|
/**
|
||||||
|
* The int64 value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: int64 value = 1;
|
||||||
|
*/
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `uint64`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `UInt64Value` is JSON string.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.UInt64Value
|
||||||
|
*/
|
||||||
|
export interface UInt64Value {
|
||||||
|
/**
|
||||||
|
* The uint64 value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: uint64 value = 1;
|
||||||
|
*/
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `int32`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `Int32Value` is JSON number.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.Int32Value
|
||||||
|
*/
|
||||||
|
export interface Int32Value {
|
||||||
|
/**
|
||||||
|
* The int32 value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: int32 value = 1;
|
||||||
|
*/
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `uint32`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `UInt32Value` is JSON number.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.UInt32Value
|
||||||
|
*/
|
||||||
|
export interface UInt32Value {
|
||||||
|
/**
|
||||||
|
* The uint32 value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: uint32 value = 1;
|
||||||
|
*/
|
||||||
|
value: number;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `bool`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.BoolValue
|
||||||
|
*/
|
||||||
|
export interface BoolValue {
|
||||||
|
/**
|
||||||
|
* The bool value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: bool value = 1;
|
||||||
|
*/
|
||||||
|
value: boolean;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `string`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `StringValue` is JSON string.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.StringValue
|
||||||
|
*/
|
||||||
|
export interface StringValue {
|
||||||
|
/**
|
||||||
|
* The string value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: string value = 1;
|
||||||
|
*/
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Wrapper message for `bytes`.
|
||||||
|
*
|
||||||
|
* The JSON representation for `BytesValue` is JSON string.
|
||||||
|
*
|
||||||
|
* @generated from protobuf message google.protobuf.BytesValue
|
||||||
|
*/
|
||||||
|
export interface BytesValue {
|
||||||
|
/**
|
||||||
|
* The bytes value.
|
||||||
|
*
|
||||||
|
* @generated from protobuf field: bytes value = 1;
|
||||||
|
*/
|
||||||
|
value: Uint8Array;
|
||||||
|
}
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class DoubleValue$Type extends MessageType<DoubleValue> {
|
||||||
|
constructor() {
|
||||||
|
super("google.protobuf.DoubleValue", [
|
||||||
|
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Encode `DoubleValue` to JSON number.
|
||||||
|
*/
|
||||||
|
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue {
|
||||||
|
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Decode `DoubleValue` from JSON number.
|
||||||
|
*/
|
||||||
|
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue {
|
||||||
|
if (!target)
|
||||||
|
target = this.create();
|
||||||
|
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
create(value?: PartialMessage<DoubleValue>): DoubleValue {
|
||||||
|
const message = { value: 0 };
|
||||||
|
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
reflectionMergePartial<DoubleValue>(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue {
|
||||||
|
let message = target ?? this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* double value */ 1:
|
||||||
|
message.value = reader.double();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||||
|
/* double value = 1; */
|
||||||
|
if (message.value !== 0)
|
||||||
|
writer.tag(1, WireType.Bit64).double(message.value);
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message google.protobuf.DoubleValue
|
||||||
|
*/
|
||||||
|
export const DoubleValue = new DoubleValue$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class FloatValue$Type extends MessageType<FloatValue> {
|
||||||
|
constructor() {
|
||||||
|
super("google.protobuf.FloatValue", [
|
||||||
|
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Encode `FloatValue` to JSON number.
|
||||||
|
*/
|
||||||
|
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue {
|
||||||
|
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Decode `FloatValue` from JSON number.
|
||||||
|
*/
|
||||||
|
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue {
|
||||||
|
if (!target)
|
||||||
|
target = this.create();
|
||||||
|
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
create(value?: PartialMessage<FloatValue>): FloatValue {
|
||||||
|
const message = { value: 0 };
|
||||||
|
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
reflectionMergePartial<FloatValue>(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue {
|
||||||
|
let message = target ?? this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* float value */ 1:
|
||||||
|
message.value = reader.float();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||||
|
/* float value = 1; */
|
||||||
|
if (message.value !== 0)
|
||||||
|
writer.tag(1, WireType.Bit32).float(message.value);
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message google.protobuf.FloatValue
|
||||||
|
*/
|
||||||
|
export const FloatValue = new FloatValue$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class Int64Value$Type extends MessageType<Int64Value> {
    constructor() {
        super("google.protobuf.Int64Value", [
            { no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    /**
     * Encode `Int64Value` to JSON string.
     */
    internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue {
        return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true);
    }
    /**
     * Decode `Int64Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
        if (!target)
            target = this.create();
        // 64-bit values are represented as strings (long_type_string generator option).
        target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
        return target;
    }
    /** Create a message with `value` at its proto3 default ("0"), merging any partial input. */
    create(value?: PartialMessage<Int64Value>): Int64Value {
        const message = { value: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Int64Value>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 value */ 1:
                    message.value = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; the proto3 default ("0") is omitted from the wire. */
    internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 value = 1; */
        if (message.value !== "0")
            writer.tag(1, WireType.Varint).int64(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.Int64Value
 */
export const Int64Value = new Int64Value$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class UInt64Value$Type extends MessageType<UInt64Value> {
    constructor() {
        super("google.protobuf.UInt64Value", [
            { no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
        ]);
    }
    /**
     * Encode `UInt64Value` to JSON string.
     */
    internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue {
        return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true);
    }
    /**
     * Decode `UInt64Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
        if (!target)
            target = this.create();
        // 64-bit values are represented as strings (long_type_string generator option).
        target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
        return target;
    }
    /** Create a message with `value` at its proto3 default ("0"), merging any partial input. */
    create(value?: PartialMessage<UInt64Value>): UInt64Value {
        const message = { value: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<UInt64Value>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* uint64 value */ 1:
                    message.value = reader.uint64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; the proto3 default ("0") is omitted from the wire. */
    internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* uint64 value = 1; */
        if (message.value !== "0")
            writer.tag(1, WireType.Varint).uint64(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.UInt64Value
 */
export const UInt64Value = new UInt64Value$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class Int32Value$Type extends MessageType<Int32Value> {
    constructor() {
        super("google.protobuf.Int32Value", [
            { no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
    /**
     * Encode `Int32Value` to JSON string.
     */
    internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue {
        return this.refJsonWriter.scalar(5, message.value, "value", false, true);
    }
    /**
     * Decode `Int32Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number;
        return target;
    }
    /** Create a message with `value` at its proto3 default (0), merging any partial input. */
    create(value?: PartialMessage<Int32Value>): Int32Value {
        const message = { value: 0 };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Int32Value>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int32 value */ 1:
                    message.value = reader.int32();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; the proto3 default (0) is omitted from the wire. */
    internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int32 value = 1; */
        if (message.value !== 0)
            writer.tag(1, WireType.Varint).int32(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.Int32Value
 */
export const Int32Value = new Int32Value$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class UInt32Value$Type extends MessageType<UInt32Value> {
    constructor() {
        super("google.protobuf.UInt32Value", [
            { no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
        ]);
    }
    /**
     * Encode `UInt32Value` to JSON string.
     */
    internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue {
        return this.refJsonWriter.scalar(13, message.value, "value", false, true);
    }
    /**
     * Decode `UInt32Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number;
        return target;
    }
    /** Create a message with `value` at its proto3 default (0), merging any partial input. */
    create(value?: PartialMessage<UInt32Value>): UInt32Value {
        const message = { value: 0 };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<UInt32Value>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* uint32 value */ 1:
                    message.value = reader.uint32();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; the proto3 default (0) is omitted from the wire. */
    internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* uint32 value = 1; */
        if (message.value !== 0)
            writer.tag(1, WireType.Varint).uint32(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.UInt32Value
 */
export const UInt32Value = new UInt32Value$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class BoolValue$Type extends MessageType<BoolValue> {
    constructor() {
        super("google.protobuf.BoolValue", [
            { no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    /**
     * Encode `BoolValue` to JSON bool.
     */
    internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue {
        // Wrapper types serialize to their bare JSON value, not an object.
        return message.value;
    }
    /**
     * Decode `BoolValue` from JSON bool.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean;
        return target;
    }
    /** Create a message with `value` at its proto3 default (false), merging any partial input. */
    create(value?: PartialMessage<BoolValue>): BoolValue {
        const message = { value: false };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<BoolValue>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool value */ 1:
                    message.value = reader.bool();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; the proto3 default (false) is omitted from the wire. */
    internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool value = 1; */
        if (message.value !== false)
            writer.tag(1, WireType.Varint).bool(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.BoolValue
 */
export const BoolValue = new BoolValue$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class StringValue$Type extends MessageType<StringValue> {
    constructor() {
        super("google.protobuf.StringValue", [
            { no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    /**
     * Encode `StringValue` to JSON string.
     */
    internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue {
        // Wrapper types serialize to their bare JSON value, not an object.
        return message.value;
    }
    /**
     * Decode `StringValue` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string;
        return target;
    }
    /** Create a message with `value` at its proto3 default (""), merging any partial input. */
    create(value?: PartialMessage<StringValue>): StringValue {
        const message = { value: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<StringValue>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string value */ 1:
                    message.value = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; the proto3 default ("") is omitted from the wire. */
    internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string value = 1; */
        if (message.value !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.StringValue
 */
export const StringValue = new StringValue$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class BytesValue$Type extends MessageType<BytesValue> {
    constructor() {
        super("google.protobuf.BytesValue", [
            { no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    /**
     * Encode `BytesValue` to JSON string.
     */
    internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue {
        return this.refJsonWriter.scalar(12, message.value, "value", false, true);
    }
    /**
     * Decode `BytesValue` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array;
        return target;
    }
    /** Create a message with `value` at its proto3 default (empty bytes), merging any partial input. */
    create(value?: PartialMessage<BytesValue>): BytesValue {
        const message = { value: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<BytesValue>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes value */ 1:
                    message.value = reader.bytes();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; empty bytes (the proto3 default) are omitted from the wire. */
    internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes value = 1; */
        if (message.value.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.BytesValue
 */
export const BytesValue = new BytesValue$Type();
|
|
@ -0,0 +1,474 @@
|
||||||
|
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||||
|
// @generated from protobuf file "results/api/v1/blobcache.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||||
|
// tslint:disable
|
||||||
|
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||||
|
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||||
|
import { WireType } from "@protobuf-ts/runtime";
|
||||||
|
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||||
|
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||||
|
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||||
|
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||||
|
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||||
|
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||||
|
import { MessageType } from "@protobuf-ts/runtime";
|
||||||
|
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||||
|
/**
 * Request to look up one or more cached blobs by key.
 *
 * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobRequest
 */
export interface GetCachedBlobRequest {
    /**
     * Owner of the blob(s) to be retrieved
     *
     * @generated from protobuf field: string owner = 1;
     */
    owner: string;
    /**
     * Key(s) of the blob(s) to be retrieved
     *
     * @generated from protobuf field: repeated string keys = 2;
     */
    keys: string[];
}
|
||||||
|
/**
 * Response carrying the cache entries that matched a GetCachedBlobRequest.
 *
 * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse
 */
export interface GetCachedBlobResponse {
    /**
     * List of blobs that match the requested keys
     *
     * @generated from protobuf field: repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1;
     */
    blobs: GetCachedBlobResponse_Blob[];
}
|
||||||
|
/**
 * A single cached blob entry: its key, a signed download URL, and metadata.
 *
 * @generated from protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob
 */
export interface GetCachedBlobResponse_Blob {
    /**
     * Key of the blob
     *
     * @generated from protobuf field: string key = 1;
     */
    key: string;
    /**
     * Download url for the cached blob
     *
     * @generated from protobuf field: string signed_url = 2;
     */
    signedUrl: string;
    /**
     * Version of the cached blob entry
     *
     * @generated from protobuf field: int32 version = 3;
     */
    version: number;
    /**
     * Checksum of the blob
     *
     * @generated from protobuf field: string checksum = 4;
     */
    checksum: string;
    /**
     * Timestamp for when the blob cache entry expires
     *
     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 5;
     */
    expiresAt?: Timestamp;
    /**
     * Timestamp for when the blob cache entry was created
     *
     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
     */
    createdAt?: Timestamp;
}
|
||||||
|
/**
 * Request for signed upload URLs for one or more cache keys.
 *
 * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest
 */
export interface GetCacheBlobUploadURLRequest {
    /**
     * Owner of the blob(s) to be retrieved
     *
     * NOTE(review): the proto comment says "Owner … retrieved" but the field is named
     * `organization` and this is an upload request — confirm the intended semantics in the .proto.
     *
     * @generated from protobuf field: string organization = 1;
     */
    organization: string;
    /**
     * Key(s) of the blob(s) to be retrieved
     *
     * @generated from protobuf field: repeated string keys = 2;
     */
    keys: string[];
}
|
||||||
|
/**
 * Response carrying one signed upload URL per requested key.
 *
 * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse
 */
export interface GetCacheBlobUploadURLResponse {
    /**
     * List of upload URLs that match the requested keys
     *
     * @generated from protobuf field: repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1;
     */
    urls: GetCacheBlobUploadURLResponse_Url[];
}
|
||||||
|
/**
 * A single (key, upload URL) pair.
 *
 * @generated from protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url
 */
export interface GetCacheBlobUploadURLResponse_Url {
    /**
     * Key of the blob
     *
     * @generated from protobuf field: string key = 1;
     */
    key: string;
    /**
     * URL to the blob
     *
     * @generated from protobuf field: string url = 2;
     */
    url: string;
}
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class GetCachedBlobRequest$Type extends MessageType<GetCachedBlobRequest> {
    constructor() {
        super("github.actions.results.api.v1.GetCachedBlobRequest", [
            { no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    /** Create a message with proto3 defaults (empty owner, no keys), merging any partial input. */
    create(value?: PartialMessage<GetCachedBlobRequest>): GetCachedBlobRequest {
        const message = { owner: "", keys: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetCachedBlobRequest>(this, message, value);
        return message;
    }
    /** Read from binary format; unknown fields are thrown, skipped, or forwarded per `options.readUnknownField`. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobRequest): GetCachedBlobRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string owner */ 1:
                    message.owner = reader.string();
                    break;
                case /* repeated string keys */ 2:
                    message.keys.push(reader.string());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; proto3 defaults are omitted; repeated string keys are unpacked. */
    internalBinaryWrite(message: GetCachedBlobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string owner = 1; */
        if (message.owner !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.owner);
        /* repeated string keys = 2; */
        for (let i = 0; i < message.keys.length; i++)
            writer.tag(2, WireType.LengthDelimited).string(message.keys[i]);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobRequest
 */
export const GetCachedBlobRequest = new GetCachedBlobRequest$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class GetCachedBlobResponse$Type extends MessageType<GetCachedBlobResponse> {
    constructor() {
        super("github.actions.results.api.v1.GetCachedBlobResponse", [
            { no: 1, name: "blobs", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCachedBlobResponse_Blob }
        ]);
    }
    /** Create a message with proto3 defaults (no blobs), merging any partial input. */
    create(value?: PartialMessage<GetCachedBlobResponse>): GetCachedBlobResponse {
        const message = { blobs: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetCachedBlobResponse>(this, message, value);
        return message;
    }
    /** Read from binary format; each blob is a nested length-delimited sub-message. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse): GetCachedBlobResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs */ 1:
                    message.blobs.push(GetCachedBlobResponse_Blob.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; each blob is framed via the writer's fork()/join() protocol. */
    internalBinaryWrite(message: GetCachedBlobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated github.actions.results.api.v1.GetCachedBlobResponse.Blob blobs = 1; */
        for (let i = 0; i < message.blobs.length; i++)
            GetCachedBlobResponse_Blob.internalBinaryWrite(message.blobs[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse
 */
export const GetCachedBlobResponse = new GetCachedBlobResponse$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — do not hand-edit; regenerate from the .proto instead.
class GetCachedBlobResponse_Blob$Type extends MessageType<GetCachedBlobResponse_Blob> {
    constructor() {
        super("github.actions.results.api.v1.GetCachedBlobResponse.Blob", [
            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 4, name: "checksum", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "expires_at", kind: "message", T: () => Timestamp },
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp }
        ]);
    }
    /** Create a message with proto3 defaults; the optional Timestamp fields start undefined. */
    create(value?: PartialMessage<GetCachedBlobResponse_Blob>): GetCachedBlobResponse_Blob {
        const message = { key: "", signedUrl: "", version: 0, checksum: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetCachedBlobResponse_Blob>(this, message, value);
        return message;
    }
    /** Read from binary format; repeated occurrences of a Timestamp field merge into the existing value. */
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCachedBlobResponse_Blob): GetCachedBlobResponse_Blob {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string key */ 1:
                    message.key = reader.string();
                    break;
                case /* string signed_url */ 2:
                    message.signedUrl = reader.string();
                    break;
                case /* int32 version */ 3:
                    message.version = reader.int32();
                    break;
                case /* string checksum */ 4:
                    message.checksum = reader.string();
                    break;
                case /* google.protobuf.Timestamp expires_at */ 5:
                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
                    break;
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    /** Write to binary format; proto3 scalar defaults and unset Timestamps are omitted from the wire. */
    internalBinaryWrite(message: GetCachedBlobResponse_Blob, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string key = 1; */
        if (message.key !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.key);
        /* string signed_url = 2; */
        if (message.signedUrl !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.signedUrl);
        /* int32 version = 3; */
        if (message.version !== 0)
            writer.tag(3, WireType.Varint).int32(message.version);
        /* string checksum = 4; */
        if (message.checksum !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.checksum);
        /* google.protobuf.Timestamp expires_at = 5; */
        if (message.expiresAt)
            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(5, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCachedBlobResponse.Blob
 */
export const GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class GetCacheBlobUploadURLRequest$Type extends MessageType<GetCacheBlobUploadURLRequest> {
|
||||||
|
constructor() {
|
||||||
|
super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
|
||||||
|
{ no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
create(value?: PartialMessage<GetCacheBlobUploadURLRequest>): GetCacheBlobUploadURLRequest {
|
||||||
|
const message = { organization: "", keys: [] };
|
||||||
|
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
reflectionMergePartial<GetCacheBlobUploadURLRequest>(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLRequest): GetCacheBlobUploadURLRequest {
|
||||||
|
let message = target ?? this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* string organization */ 1:
|
||||||
|
message.organization = reader.string();
|
||||||
|
break;
|
||||||
|
case /* repeated string keys */ 2:
|
||||||
|
message.keys.push(reader.string());
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message: GetCacheBlobUploadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||||
|
/* string organization = 1; */
|
||||||
|
if (message.organization !== "")
|
||||||
|
writer.tag(1, WireType.LengthDelimited).string(message.organization);
|
||||||
|
/* repeated string keys = 2; */
|
||||||
|
for (let i = 0; i < message.keys.length; i++)
|
||||||
|
writer.tag(2, WireType.LengthDelimited).string(message.keys[i]);
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLRequest
|
||||||
|
*/
|
||||||
|
export const GetCacheBlobUploadURLRequest = new GetCacheBlobUploadURLRequest$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class GetCacheBlobUploadURLResponse$Type extends MessageType<GetCacheBlobUploadURLResponse> {
|
||||||
|
constructor() {
|
||||||
|
super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse", [
|
||||||
|
{ no: 1, name: "urls", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => GetCacheBlobUploadURLResponse_Url }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
create(value?: PartialMessage<GetCacheBlobUploadURLResponse>): GetCacheBlobUploadURLResponse {
|
||||||
|
const message = { urls: [] };
|
||||||
|
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
reflectionMergePartial<GetCacheBlobUploadURLResponse>(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse): GetCacheBlobUploadURLResponse {
|
||||||
|
let message = target ?? this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls */ 1:
|
||||||
|
message.urls.push(GetCacheBlobUploadURLResponse_Url.internalBinaryRead(reader, reader.uint32(), options));
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message: GetCacheBlobUploadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||||
|
/* repeated github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url urls = 1; */
|
||||||
|
for (let i = 0; i < message.urls.length; i++)
|
||||||
|
GetCacheBlobUploadURLResponse_Url.internalBinaryWrite(message.urls[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse
|
||||||
|
*/
|
||||||
|
export const GetCacheBlobUploadURLResponse = new GetCacheBlobUploadURLResponse$Type();
|
||||||
|
// @generated message type with reflection information, may provide speed optimized methods
|
||||||
|
class GetCacheBlobUploadURLResponse_Url$Type extends MessageType<GetCacheBlobUploadURLResponse_Url> {
|
||||||
|
constructor() {
|
||||||
|
super("github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url", [
|
||||||
|
{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||||
|
{ no: 2, name: "url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
create(value?: PartialMessage<GetCacheBlobUploadURLResponse_Url>): GetCacheBlobUploadURLResponse_Url {
|
||||||
|
const message = { key: "", url: "" };
|
||||||
|
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||||
|
if (value !== undefined)
|
||||||
|
reflectionMergePartial<GetCacheBlobUploadURLResponse_Url>(this, message, value);
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheBlobUploadURLResponse_Url): GetCacheBlobUploadURLResponse_Url {
|
||||||
|
let message = target ?? this.create(), end = reader.pos + length;
|
||||||
|
while (reader.pos < end) {
|
||||||
|
let [fieldNo, wireType] = reader.tag();
|
||||||
|
switch (fieldNo) {
|
||||||
|
case /* string key */ 1:
|
||||||
|
message.key = reader.string();
|
||||||
|
break;
|
||||||
|
case /* string url */ 2:
|
||||||
|
message.url = reader.string();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
let u = options.readUnknownField;
|
||||||
|
if (u === "throw")
|
||||||
|
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||||
|
let d = reader.skip(wireType);
|
||||||
|
if (u !== false)
|
||||||
|
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return message;
|
||||||
|
}
|
||||||
|
internalBinaryWrite(message: GetCacheBlobUploadURLResponse_Url, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||||
|
/* string key = 1; */
|
||||||
|
if (message.key !== "")
|
||||||
|
writer.tag(1, WireType.LengthDelimited).string(message.key);
|
||||||
|
/* string url = 2; */
|
||||||
|
if (message.url !== "")
|
||||||
|
writer.tag(2, WireType.LengthDelimited).string(message.url);
|
||||||
|
let u = options.writeUnknownFields;
|
||||||
|
if (u !== false)
|
||||||
|
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheBlobUploadURLResponse.Url
|
||||||
|
*/
|
||||||
|
export const GetCacheBlobUploadURLResponse_Url = new GetCacheBlobUploadURLResponse_Url$Type();
|
||||||
|
/**
|
||||||
|
* @generated ServiceType for protobuf service github.actions.results.api.v1.BlobCacheService
|
||||||
|
*/
|
||||||
|
export const BlobCacheService = new ServiceType("github.actions.results.api.v1.BlobCacheService", [
|
||||||
|
{ name: "GetCachedBlob", options: {}, I: GetCachedBlobRequest, O: GetCachedBlobResponse },
|
||||||
|
{ name: "GetCacheBlobUploadURL", options: {}, I: GetCacheBlobUploadURLRequest, O: GetCacheBlobUploadURLResponse }
|
||||||
|
]);
|
|
@ -0,0 +1,433 @@
|
||||||
|
import {
|
||||||
|
TwirpContext,
|
||||||
|
TwirpServer,
|
||||||
|
RouterEvents,
|
||||||
|
TwirpError,
|
||||||
|
TwirpErrorCode,
|
||||||
|
Interceptor,
|
||||||
|
TwirpContentType,
|
||||||
|
chainInterceptors,
|
||||||
|
} from "twirp-ts";
|
||||||
|
import {
|
||||||
|
GetCachedBlobRequest,
|
||||||
|
GetCachedBlobResponse,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse,
|
||||||
|
} from "./blobcache";
|
||||||
|
|
||||||
|
//==================================//
|
||||||
|
// Client Code //
|
||||||
|
//==================================//
|
||||||
|
|
||||||
|
interface Rpc {
|
||||||
|
request(
|
||||||
|
service: string,
|
||||||
|
method: string,
|
||||||
|
contentType: "application/json" | "application/protobuf",
|
||||||
|
data: object | Uint8Array
|
||||||
|
): Promise<object | Uint8Array>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BlobCacheServiceClient {
|
||||||
|
GetCachedBlob(request: GetCachedBlobRequest): Promise<GetCachedBlobResponse>;
|
||||||
|
GetCacheBlobUploadURL(
|
||||||
|
request: GetCacheBlobUploadURLRequest
|
||||||
|
): Promise<GetCacheBlobUploadURLResponse>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class BlobCacheServiceClientJSON implements BlobCacheServiceClient {
|
||||||
|
private readonly rpc: Rpc;
|
||||||
|
constructor(rpc: Rpc) {
|
||||||
|
this.rpc = rpc;
|
||||||
|
this.GetCachedBlob.bind(this);
|
||||||
|
this.GetCacheBlobUploadURL.bind(this);
|
||||||
|
}
|
||||||
|
GetCachedBlob(request: GetCachedBlobRequest): Promise<GetCachedBlobResponse> {
|
||||||
|
const data = GetCachedBlobRequest.toJson(request, {
|
||||||
|
useProtoFieldName: true,
|
||||||
|
emitDefaultValues: false,
|
||||||
|
});
|
||||||
|
const promise = this.rpc.request(
|
||||||
|
"github.actions.results.api.v1.BlobCacheService",
|
||||||
|
"GetCachedBlob",
|
||||||
|
"application/json",
|
||||||
|
data as object
|
||||||
|
);
|
||||||
|
return promise.then((data) =>
|
||||||
|
GetCachedBlobResponse.fromJson(data as any, { ignoreUnknownFields: true })
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
GetCacheBlobUploadURL(
|
||||||
|
request: GetCacheBlobUploadURLRequest
|
||||||
|
): Promise<GetCacheBlobUploadURLResponse> {
|
||||||
|
const data = GetCacheBlobUploadURLRequest.toJson(request, {
|
||||||
|
useProtoFieldName: true,
|
||||||
|
emitDefaultValues: false,
|
||||||
|
});
|
||||||
|
const promise = this.rpc.request(
|
||||||
|
"github.actions.results.api.v1.BlobCacheService",
|
||||||
|
"GetCacheBlobUploadURL",
|
||||||
|
"application/json",
|
||||||
|
data as object
|
||||||
|
);
|
||||||
|
return promise.then((data) =>
|
||||||
|
GetCacheBlobUploadURLResponse.fromJson(data as any, {
|
||||||
|
ignoreUnknownFields: true,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class BlobCacheServiceClientProtobuf implements BlobCacheServiceClient {
|
||||||
|
private readonly rpc: Rpc;
|
||||||
|
constructor(rpc: Rpc) {
|
||||||
|
this.rpc = rpc;
|
||||||
|
this.GetCachedBlob.bind(this);
|
||||||
|
this.GetCacheBlobUploadURL.bind(this);
|
||||||
|
}
|
||||||
|
GetCachedBlob(request: GetCachedBlobRequest): Promise<GetCachedBlobResponse> {
|
||||||
|
const data = GetCachedBlobRequest.toBinary(request);
|
||||||
|
const promise = this.rpc.request(
|
||||||
|
"github.actions.results.api.v1.BlobCacheService",
|
||||||
|
"GetCachedBlob",
|
||||||
|
"application/protobuf",
|
||||||
|
data
|
||||||
|
);
|
||||||
|
return promise.then((data) =>
|
||||||
|
GetCachedBlobResponse.fromBinary(data as Uint8Array)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
GetCacheBlobUploadURL(
|
||||||
|
request: GetCacheBlobUploadURLRequest
|
||||||
|
): Promise<GetCacheBlobUploadURLResponse> {
|
||||||
|
const data = GetCacheBlobUploadURLRequest.toBinary(request);
|
||||||
|
const promise = this.rpc.request(
|
||||||
|
"github.actions.results.api.v1.BlobCacheService",
|
||||||
|
"GetCacheBlobUploadURL",
|
||||||
|
"application/protobuf",
|
||||||
|
data
|
||||||
|
);
|
||||||
|
return promise.then((data) =>
|
||||||
|
GetCacheBlobUploadURLResponse.fromBinary(data as Uint8Array)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//==================================//
|
||||||
|
// Server Code //
|
||||||
|
//==================================//
|
||||||
|
|
||||||
|
export interface BlobCacheServiceTwirp<T extends TwirpContext = TwirpContext> {
|
||||||
|
GetCachedBlob(
|
||||||
|
ctx: T,
|
||||||
|
request: GetCachedBlobRequest
|
||||||
|
): Promise<GetCachedBlobResponse>;
|
||||||
|
GetCacheBlobUploadURL(
|
||||||
|
ctx: T,
|
||||||
|
request: GetCacheBlobUploadURLRequest
|
||||||
|
): Promise<GetCacheBlobUploadURLResponse>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum BlobCacheServiceMethod {
|
||||||
|
GetCachedBlob = "GetCachedBlob",
|
||||||
|
GetCacheBlobUploadURL = "GetCacheBlobUploadURL",
|
||||||
|
}
|
||||||
|
|
||||||
|
export const BlobCacheServiceMethodList = [
|
||||||
|
BlobCacheServiceMethod.GetCachedBlob,
|
||||||
|
BlobCacheServiceMethod.GetCacheBlobUploadURL,
|
||||||
|
];
|
||||||
|
|
||||||
|
export function createBlobCacheServiceServer<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(service: BlobCacheServiceTwirp<T>) {
|
||||||
|
return new TwirpServer<BlobCacheServiceTwirp, T>({
|
||||||
|
service,
|
||||||
|
packageName: "github.actions.results.api.v1",
|
||||||
|
serviceName: "BlobCacheService",
|
||||||
|
methodList: BlobCacheServiceMethodList,
|
||||||
|
matchRoute: matchBlobCacheServiceRoute,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function matchBlobCacheServiceRoute<T extends TwirpContext = TwirpContext>(
|
||||||
|
method: string,
|
||||||
|
events: RouterEvents<T>
|
||||||
|
) {
|
||||||
|
switch (method) {
|
||||||
|
case "GetCachedBlob":
|
||||||
|
return async (
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<
|
||||||
|
T,
|
||||||
|
GetCachedBlobRequest,
|
||||||
|
GetCachedBlobResponse
|
||||||
|
>[]
|
||||||
|
) => {
|
||||||
|
ctx = { ...ctx, methodName: "GetCachedBlob" };
|
||||||
|
await events.onMatch(ctx);
|
||||||
|
return handleBlobCacheServiceGetCachedBlobRequest(
|
||||||
|
ctx,
|
||||||
|
service,
|
||||||
|
data,
|
||||||
|
interceptors
|
||||||
|
);
|
||||||
|
};
|
||||||
|
case "GetCacheBlobUploadURL":
|
||||||
|
return async (
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<
|
||||||
|
T,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse
|
||||||
|
>[]
|
||||||
|
) => {
|
||||||
|
ctx = { ...ctx, methodName: "GetCacheBlobUploadURL" };
|
||||||
|
await events.onMatch(ctx);
|
||||||
|
return handleBlobCacheServiceGetCacheBlobUploadURLRequest(
|
||||||
|
ctx,
|
||||||
|
service,
|
||||||
|
data,
|
||||||
|
interceptors
|
||||||
|
);
|
||||||
|
};
|
||||||
|
default:
|
||||||
|
events.onNotFound();
|
||||||
|
const msg = `no handler found`;
|
||||||
|
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleBlobCacheServiceGetCachedBlobRequest<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<T, GetCachedBlobRequest, GetCachedBlobResponse>[]
|
||||||
|
): Promise<string | Uint8Array> {
|
||||||
|
switch (ctx.contentType) {
|
||||||
|
case TwirpContentType.JSON:
|
||||||
|
return handleBlobCacheServiceGetCachedBlobJSON<T>(
|
||||||
|
ctx,
|
||||||
|
service,
|
||||||
|
data,
|
||||||
|
interceptors
|
||||||
|
);
|
||||||
|
case TwirpContentType.Protobuf:
|
||||||
|
return handleBlobCacheServiceGetCachedBlobProtobuf<T>(
|
||||||
|
ctx,
|
||||||
|
service,
|
||||||
|
data,
|
||||||
|
interceptors
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
const msg = "unexpected Content-Type";
|
||||||
|
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleBlobCacheServiceGetCacheBlobUploadURLRequest<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<
|
||||||
|
T,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse
|
||||||
|
>[]
|
||||||
|
): Promise<string | Uint8Array> {
|
||||||
|
switch (ctx.contentType) {
|
||||||
|
case TwirpContentType.JSON:
|
||||||
|
return handleBlobCacheServiceGetCacheBlobUploadURLJSON<T>(
|
||||||
|
ctx,
|
||||||
|
service,
|
||||||
|
data,
|
||||||
|
interceptors
|
||||||
|
);
|
||||||
|
case TwirpContentType.Protobuf:
|
||||||
|
return handleBlobCacheServiceGetCacheBlobUploadURLProtobuf<T>(
|
||||||
|
ctx,
|
||||||
|
service,
|
||||||
|
data,
|
||||||
|
interceptors
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
const msg = "unexpected Content-Type";
|
||||||
|
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function handleBlobCacheServiceGetCachedBlobJSON<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<T, GetCachedBlobRequest, GetCachedBlobResponse>[]
|
||||||
|
) {
|
||||||
|
let request: GetCachedBlobRequest;
|
||||||
|
let response: GetCachedBlobResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = JSON.parse(data.toString() || "{}");
|
||||||
|
request = GetCachedBlobRequest.fromJson(body, {
|
||||||
|
ignoreUnknownFields: true,
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Error) {
|
||||||
|
const msg = "the json request could not be decoded";
|
||||||
|
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (interceptors && interceptors.length > 0) {
|
||||||
|
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||||
|
T,
|
||||||
|
GetCachedBlobRequest,
|
||||||
|
GetCachedBlobResponse
|
||||||
|
>;
|
||||||
|
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||||
|
return service.GetCachedBlob(ctx, inputReq);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
response = await service.GetCachedBlob(ctx, request!);
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.stringify(
|
||||||
|
GetCachedBlobResponse.toJson(response, {
|
||||||
|
useProtoFieldName: true,
|
||||||
|
emitDefaultValues: false,
|
||||||
|
}) as string
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleBlobCacheServiceGetCacheBlobUploadURLJSON<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<
|
||||||
|
T,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse
|
||||||
|
>[]
|
||||||
|
) {
|
||||||
|
let request: GetCacheBlobUploadURLRequest;
|
||||||
|
let response: GetCacheBlobUploadURLResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = JSON.parse(data.toString() || "{}");
|
||||||
|
request = GetCacheBlobUploadURLRequest.fromJson(body, {
|
||||||
|
ignoreUnknownFields: true,
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Error) {
|
||||||
|
const msg = "the json request could not be decoded";
|
||||||
|
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (interceptors && interceptors.length > 0) {
|
||||||
|
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||||
|
T,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse
|
||||||
|
>;
|
||||||
|
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||||
|
return service.GetCacheBlobUploadURL(ctx, inputReq);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
response = await service.GetCacheBlobUploadURL(ctx, request!);
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.stringify(
|
||||||
|
GetCacheBlobUploadURLResponse.toJson(response, {
|
||||||
|
useProtoFieldName: true,
|
||||||
|
emitDefaultValues: false,
|
||||||
|
}) as string
|
||||||
|
);
|
||||||
|
}
|
||||||
|
async function handleBlobCacheServiceGetCachedBlobProtobuf<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<T, GetCachedBlobRequest, GetCachedBlobResponse>[]
|
||||||
|
) {
|
||||||
|
let request: GetCachedBlobRequest;
|
||||||
|
let response: GetCachedBlobResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
request = GetCachedBlobRequest.fromBinary(data);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Error) {
|
||||||
|
const msg = "the protobuf request could not be decoded";
|
||||||
|
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (interceptors && interceptors.length > 0) {
|
||||||
|
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||||
|
T,
|
||||||
|
GetCachedBlobRequest,
|
||||||
|
GetCachedBlobResponse
|
||||||
|
>;
|
||||||
|
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||||
|
return service.GetCachedBlob(ctx, inputReq);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
response = await service.GetCachedBlob(ctx, request!);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Buffer.from(GetCachedBlobResponse.toBinary(response));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleBlobCacheServiceGetCacheBlobUploadURLProtobuf<
|
||||||
|
T extends TwirpContext = TwirpContext
|
||||||
|
>(
|
||||||
|
ctx: T,
|
||||||
|
service: BlobCacheServiceTwirp,
|
||||||
|
data: Buffer,
|
||||||
|
interceptors?: Interceptor<
|
||||||
|
T,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse
|
||||||
|
>[]
|
||||||
|
) {
|
||||||
|
let request: GetCacheBlobUploadURLRequest;
|
||||||
|
let response: GetCacheBlobUploadURLResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
request = GetCacheBlobUploadURLRequest.fromBinary(data);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Error) {
|
||||||
|
const msg = "the protobuf request could not be decoded";
|
||||||
|
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (interceptors && interceptors.length > 0) {
|
||||||
|
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||||
|
T,
|
||||||
|
GetCacheBlobUploadURLRequest,
|
||||||
|
GetCacheBlobUploadURLResponse
|
||||||
|
>;
|
||||||
|
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||||
|
return service.GetCacheBlobUploadURL(ctx, inputReq);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
response = await service.GetCacheBlobUploadURL(ctx, request!);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Buffer.from(GetCacheBlobUploadURLResponse.toBinary(response));
|
||||||
|
}
|
|
@ -36,11 +36,12 @@ import {
|
||||||
retryHttpClientResponse,
|
retryHttpClientResponse,
|
||||||
retryTypedResponse
|
retryTypedResponse
|
||||||
} from './requestUtils'
|
} from './requestUtils'
|
||||||
|
import {CacheUrl} from './constants'
|
||||||
|
|
||||||
const versionSalt = '1.0'
|
const versionSalt = '1.0'
|
||||||
|
|
||||||
function getCacheApiUrl(resource: string): string {
|
function getCacheApiUrl(resource: string): string {
|
||||||
const baseUrl: string = process.env['ACTIONS_CACHE_URL'] || ''
|
const baseUrl: string = CacheUrl || ''
|
||||||
if (!baseUrl) {
|
if (!baseUrl) {
|
||||||
throw new Error('Cache Service Url not found, unable to restore cache.')
|
throw new Error('Cache Service Url not found, unable to restore cache.')
|
||||||
}
|
}
|
||||||
|
@ -111,8 +112,6 @@ export async function getCacheEntry(
|
||||||
options?.compressionMethod,
|
options?.compressionMethod,
|
||||||
options?.enableCrossOsArchive
|
options?.enableCrossOsArchive
|
||||||
)
|
)
|
||||||
|
|
||||||
core.debug(`We're running from the abyss`);
|
|
||||||
|
|
||||||
const resource = `cache?keys=${encodeURIComponent(
|
const resource = `cache?keys=${encodeURIComponent(
|
||||||
keys.join(',')
|
keys.join(',')
|
||||||
|
|
|
@ -0,0 +1,197 @@
|
||||||
|
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
|
||||||
|
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
|
||||||
|
import {info, debug} from '@actions/core'
|
||||||
|
import {BlobCacheServiceClientJSON} from '../generated/results/api/v1/blobcache.twirp'
|
||||||
|
import {CacheUrl} from './constants'
|
||||||
|
import {getRuntimeToken} from './config'
|
||||||
|
// import {getUserAgentString} from './user-agent'
|
||||||
|
// import {NetworkError, UsageError} from './errors'
|
||||||
|
|
||||||
|
// The twirp http client must implement this interface
|
||||||
|
interface Rpc {
|
||||||
|
request(
|
||||||
|
service: string,
|
||||||
|
method: string,
|
||||||
|
contentType: 'application/json' | 'application/protobuf',
|
||||||
|
data: object | Uint8Array
|
||||||
|
): Promise<object | Uint8Array>
|
||||||
|
}
|
||||||
|
|
||||||
|
class BlobCacheServiceClient implements Rpc {
|
||||||
|
private httpClient: HttpClient
|
||||||
|
private baseUrl: string
|
||||||
|
private maxAttempts = 5
|
||||||
|
private baseRetryIntervalMilliseconds = 3000
|
||||||
|
private retryMultiplier = 1.5
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
userAgent: string,
|
||||||
|
maxAttempts?: number,
|
||||||
|
baseRetryIntervalMilliseconds?: number,
|
||||||
|
retryMultiplier?: number
|
||||||
|
) {
|
||||||
|
const token = getRuntimeToken()
|
||||||
|
this.baseUrl = CacheUrl
|
||||||
|
if (maxAttempts) {
|
||||||
|
this.maxAttempts = maxAttempts
|
||||||
|
}
|
||||||
|
if (baseRetryIntervalMilliseconds) {
|
||||||
|
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
|
||||||
|
}
|
||||||
|
if (retryMultiplier) {
|
||||||
|
this.retryMultiplier = retryMultiplier
|
||||||
|
}
|
||||||
|
|
||||||
|
this.httpClient = new HttpClient(userAgent, [
|
||||||
|
new BearerCredentialHandler(token)
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
// This function satisfies the Rpc interface. It is compatible with the JSON
|
||||||
|
// JSON generated client.
|
||||||
|
async request(
|
||||||
|
service: string,
|
||||||
|
method: string,
|
||||||
|
contentType: 'application/json' | 'application/protobuf',
|
||||||
|
data: object | Uint8Array
|
||||||
|
): Promise<object | Uint8Array> {
|
||||||
|
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
|
||||||
|
debug(`[Request] ${method} ${url}`)
|
||||||
|
const headers = {
|
||||||
|
'Content-Type': contentType
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const {body} = await this.retryableRequest(async () =>
|
||||||
|
this.httpClient.post(url, JSON.stringify(data), headers)
|
||||||
|
)
|
||||||
|
|
||||||
|
return body
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to ${method}: ${error.message}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Executes `operation` with retries and exponential backoff.
 *
 * Each attempt performs the request, reads and JSON-parses the body, and
 * returns on a 2xx status. Non-2xx statuses are retried only when
 * `isRetryableHttpStatusCode` says so; any thrown error (network failure,
 * JSON parse error, …) is treated as retryable. A non-retryable status
 * throws immediately; exhausting `this.maxAttempts` throws with the last
 * recorded error message.
 *
 * @param operation - thunk performing one HTTP request
 * @returns the successful response together with its JSON-parsed body
 * @throws Error on a non-retryable status or after all attempts fail
 */
async retryableRequest(
  operation: () => Promise<HttpClientResponse>
): Promise<{response: HttpClientResponse; body: object}> {
  let attempt = 0
  let errorMessage = ''
  // Raw response text kept outside the try so a JSON.parse failure in the
  // catch below can still log what was actually received.
  let rawBody = ''
  while (attempt < this.maxAttempts) {
    let isRetryable = false

    try {
      const response = await operation()
      const statusCode = response.message.statusCode
      rawBody = await response.readBody()
      debug(`[Response] - ${response.message.statusCode}`)
      debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
      const body = JSON.parse(rawBody)
      debug(`Body: ${JSON.stringify(body, null, 2)}`)
      if (this.isSuccessStatusCode(statusCode)) {
        return {response, body}
      }
      isRetryable = this.isRetryableHttpStatusCode(statusCode)
      errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
      // Append the service-provided message when the error body carries one.
      if (body.msg) {
        // if (UsageError.isUsageErrorMessage(body.msg)) {
        //   throw new UsageError()
        // }

        errorMessage = `${errorMessage}: ${body.msg}`
      }
    } catch (error) {
      // SyntaxError here means JSON.parse failed; surface the raw payload.
      if (error instanceof SyntaxError) {
        debug(`Raw Body: ${rawBody}`)
      }

      // if (error instanceof UsageError) {
      //   throw error
      // }

      // if (NetworkError.isNetworkErrorCode(error?.code)) {
      //   throw new NetworkError(error?.code)
      // }

      // All thrown errors are currently considered transient and retried.
      isRetryable = true
      errorMessage = error.message
    }

    if (!isRetryable) {
      throw new Error(`Received non-retryable error: ${errorMessage}`)
    }

    // Last attempt just failed — no point sleeping before giving up.
    if (attempt + 1 === this.maxAttempts) {
      throw new Error(
        `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
      )
    }

    const retryTimeMilliseconds =
      this.getExponentialRetryTimeMilliseconds(attempt)
    info(
      `Attempt ${attempt + 1} of ${
        this.maxAttempts
      } failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
    )
    await this.sleep(retryTimeMilliseconds)
    attempt++
  }

  // Unreachable in practice (the loop either returns or throws), but keeps
  // the compiler satisfied that every path produces or throws.
  throw new Error(`Request failed`)
}
|
||||||
|
|
||||||
|
/**
 * Reports whether the given HTTP status code denotes success (2xx).
 *
 * @param statusCode - status code from the response, if any
 * @returns true only for codes in the 200–299 range
 */
isSuccessStatusCode(statusCode?: number): boolean {
  return statusCode !== undefined && statusCode >= 200 && statusCode < 300
}
|
||||||
|
|
||||||
|
/**
 * Reports whether the given HTTP status code is transient and worth
 * retrying (429, 500, 502, 503, 504).
 *
 * @param statusCode - status code from the response, if any
 * @returns true when a retry may succeed
 */
isRetryableHttpStatusCode(statusCode?: number): boolean {
  if (statusCode === undefined) {
    return false
  }

  switch (statusCode) {
    case HttpCodes.BadGateway:
    case HttpCodes.GatewayTimeout:
    case HttpCodes.InternalServerError:
    case HttpCodes.ServiceUnavailable:
    case HttpCodes.TooManyRequests:
      return true
    default:
      return false
  }
}
|
||||||
|
|
||||||
|
async sleep(milliseconds: number): Promise<void> {
|
||||||
|
return new Promise(resolve => setTimeout(resolve, milliseconds))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Computes the jittered exponential-backoff delay for a retry attempt.
 *
 * For attempt 0 the base interval is returned unchanged. For later
 * attempts the delay is a random value in [minTime, maxTime), where
 * minTime = base * multiplier^attempt and maxTime = minTime * multiplier.
 *
 * @param attempt - zero-based attempt index
 * @returns delay in milliseconds
 * @throws Error when attempt is negative
 */
getExponentialRetryTimeMilliseconds(attempt: number): number {
  if (attempt < 0) {
    // attempt === 0 is explicitly accepted below, so the requirement is
    // non-negative, not positive (original message said "positive").
    throw new Error('attempt should be a non-negative integer')
  }

  if (attempt === 0) {
    return this.baseRetryIntervalMilliseconds
  }

  const minTime =
    this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
  const maxTime = minTime * this.retryMultiplier

  // returns a random number between minTime and maxTime (exclusive)
  return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function internalBlobCacheTwirpClient(options?: {
|
||||||
|
maxAttempts?: number
|
||||||
|
retryIntervalMs?: number
|
||||||
|
retryMultiplier?: number
|
||||||
|
}): BlobCacheServiceClientJSON {
|
||||||
|
const client = new BlobCacheServiceClient(
|
||||||
|
'actions/cache',
|
||||||
|
options?.maxAttempts,
|
||||||
|
options?.retryIntervalMs,
|
||||||
|
options?.retryMultiplier
|
||||||
|
)
|
||||||
|
return new BlobCacheServiceClientJSON(client)
|
||||||
|
}
|
|
@ -0,0 +1,7 @@
|
||||||
|
export function getRuntimeToken(): string {
|
||||||
|
const token = process.env['ACTIONS_RUNTIME_TOKEN']
|
||||||
|
if (!token) {
|
||||||
|
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
|
||||||
|
}
|
||||||
|
return token
|
||||||
|
}
|
|
@ -36,3 +36,6 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S
|
||||||
export const TarFilename = 'cache.tar'
|
export const TarFilename = 'cache.tar'
|
||||||
|
|
||||||
export const ManifestFilename = 'manifest.txt'
|
export const ManifestFilename = 'manifest.txt'
|
||||||
|
|
||||||
|
// Cache URL
|
||||||
|
export const CacheUrl = `${process.env['ACTIONS_CACHE_URL_NEXT']}`
|
||||||
|
|
Loading…
Reference in New Issue