
Formatting and stylistic cleanup

pull/1857/head
Bassem Dghaidi 2024-11-14 03:22:03 -08:00 committed by GitHub
parent 69409b3acd
commit b2557ac90c
11 changed files with 321 additions and 297 deletions

View File

@@ -1,8 +1,8 @@
-import { downloadCache } from '../src/internal/cacheHttpClient'
+import {downloadCache} from '../src/internal/cacheHttpClient'
-import { getCacheVersion } from '../src/internal/cacheUtils'
+import {getCacheVersion} from '../src/internal/cacheUtils'
-import { CompressionMethod } from '../src/internal/constants'
+import {CompressionMethod} from '../src/internal/constants'
 import * as downloadUtils from '../src/internal/downloadUtils'
-import { DownloadOptions, getDownloadOptions } from '../src/options'
+import {DownloadOptions, getDownloadOptions} from '../src/options'
 jest.mock('../src/internal/downloadUtils')
@@ -129,7 +129,7 @@ test('downloadCache passes options to download methods', async () => {
   const archiveLocation = 'http://foo.blob.core.windows.net/bar/baz'
   const archivePath = '/foo/bar'
-  const options: DownloadOptions = { downloadConcurrency: 4 }
+  const options: DownloadOptions = {downloadConcurrency: 4}
   await downloadCache(archiveLocation, archivePath, options)

View File

@@ -4,8 +4,8 @@ import * as config from './internal/config'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import { DownloadOptions, UploadOptions } from './options'
+import {DownloadOptions, UploadOptions} from './options'
-import { createTar, extractTar, listTar } from './internal/tar'
+import {createTar, extractTar, listTar} from './internal/tar'
 import {
   CreateCacheEntryRequest,
   CreateCacheEntryResponse,
@@ -14,9 +14,9 @@ import {
   GetCacheEntryDownloadURLRequest,
   GetCacheEntryDownloadURLResponse
 } from './generated/results/api/v1/cache'
-import { CacheFileSizeLimit } from './internal/constants'
+import {CacheFileSizeLimit} from './internal/constants'
-import { UploadCacheFile } from './internal/blob/upload-cache'
+import {UploadCacheFile} from './internal/blob/upload-cache'
-import { DownloadCacheFile } from './internal/blob/download-cache'
+import {DownloadCacheFile} from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -86,23 +86,35 @@ export async function restoreCache(
   const cacheServiceVersion: string = config.getCacheServiceVersion()
   console.debug(`Cache service version: ${cacheServiceVersion}`)
   switch (cacheServiceVersion) {
-    case "v2":
-      return await restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive)
-    case "v1":
+    case 'v2':
+      return await restoreCachev2(
+        paths,
+        primaryKey,
+        restoreKeys,
+        options,
+        enableCrossOsArchive
+      )
+    case 'v1':
     default:
-      return await restoreCachev1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive)
+      return await restoreCachev1(
+        paths,
+        primaryKey,
+        restoreKeys,
+        options,
+        enableCrossOsArchive
+      )
   }
 }
 /**
  * Restores cache using the legacy Cache Service
  *
  * @param paths
  * @param primaryKey
  * @param restoreKeys
  * @param options
  * @param enableCrossOsArchive
  * @returns
  */
 async function restoreCachev1(
   paths: string[],
@@ -238,12 +250,15 @@ async function restoreCachev2(
     version: utils.getCacheVersion(
       paths,
       compressionMethod,
-      enableCrossOsArchive,
-    ),
+      enableCrossOsArchive
+    )
   }
-  core.debug(`GetCacheEntryDownloadURLRequest: ${JSON.stringify(twirpClient)}`)
-  const response: GetCacheEntryDownloadURLResponse = await twirpClient.GetCacheEntryDownloadURL(request)
+  core.debug(
+    `GetCacheEntryDownloadURLRequest: ${JSON.stringify(twirpClient)}`
+  )
+  const response: GetCacheEntryDownloadURLResponse =
+    await twirpClient.GetCacheEntryDownloadURL(request)
   core.debug(`GetCacheEntryDownloadURLResponse: ${JSON.stringify(response)}`)
   if (!response.ok) {
@@ -266,10 +281,7 @@ async function restoreCachev2(
     core.debug(`Starting download of artifact to: ${archivePath}`)
-    await DownloadCacheFile(
-      response.signedDownloadUrl,
-      archivePath
-    )
+    await DownloadCacheFile(response.signedDownloadUrl, archivePath)
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -320,9 +332,9 @@ export async function saveCache(
   const cacheServiceVersion: string = config.getCacheServiceVersion()
   console.debug(`Cache Service Version: ${cacheServiceVersion}`)
   switch (cacheServiceVersion) {
-    case "v2":
+    case 'v2':
       return await saveCachev2(paths, key, options, enableCrossOsArchive)
-    case "v1":
+    case 'v1':
     default:
       return await saveCachev1(paths, key, options, enableCrossOsArchive)
   }
@@ -330,12 +342,12 @@ export async function saveCache(
 /**
  * Save cache using the legacy Cache Service
  *
  * @param paths
  * @param key
  * @param options
  * @param enableCrossOsArchive
  * @returns
  */
 async function saveCachev1(
   paths: string[],
@@ -398,9 +410,9 @@ async function saveCachev1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
         `Cache size of ~${Math.round(
           archiveFileSize / (1024 * 1024)
         )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(
@@ -433,12 +445,12 @@ async function saveCachev1(
 /**
  * Save cache using the new Cache Service
  *
  * @param paths
  * @param key
  * @param options
  * @param enableCrossOsArchive
  * @returns
  */
 async function saveCachev2(
   paths: string[],
@@ -500,7 +512,8 @@ async function saveCachev2(
     key: key,
     version: version
   }
-  const response: CreateCacheEntryResponse = await twirpClient.CreateCacheEntry(request)
+  const response: CreateCacheEntryResponse =
+    await twirpClient.CreateCacheEntry(request)
   if (!response.ok) {
     throw new ReserveCacheError(
       `Unable to reserve cache with key ${key}, another job may be creating this cache.`
@@ -508,21 +521,21 @@ async function saveCachev2(
   }
   core.debug(`Saving Cache to: ${core.setSecret(response.signedUploadUrl)}`)
-  await UploadCacheFile(
-    response.signedUploadUrl,
-    archivePath,
-  )
+  await UploadCacheFile(response.signedUploadUrl, archivePath)
   const finalizeRequest: FinalizeCacheEntryUploadRequest = {
     workflowRunBackendId: backendIds.workflowRunBackendId,
     workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
     key: key,
     version: version,
-    sizeBytes: `${archiveFileSize}`,
+    sizeBytes: `${archiveFileSize}`
   }
-  const finalizeResponse: FinalizeCacheEntryUploadResponse = await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
-  core.debug(`FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`)
+  const finalizeResponse: FinalizeCacheEntryUploadResponse =
+    await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
+  core.debug(
+    `FinalizeCacheEntryUploadResponse: ${JSON.stringify(finalizeResponse)}`
+  )
   if (!finalizeResponse.ok) {
     throw new Error(
@@ -544,4 +557,4 @@ async function saveCachev2(
   }
   return cacheId
 }
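
For orientation, the reworked switch above only changes how the v1/v2 code paths are chosen, not the public API. A minimal caller-side sketch (illustrative only; the key and paths below are made up, and ACTIONS_CACHE_SERVICE_V2 is the flag read by config.getCacheServiceVersion()):

import * as cache from '@actions/cache'

async function run(): Promise<void> {
  // Hypothetical paths/keys purely for illustration.
  const paths = ['node_modules']
  const key = 'deps-linux-abc123'
  const restoreKeys = ['deps-linux-']

  // restoreCache/saveCache pick the v1 or v2 code path internally based on
  // getCacheServiceVersion(), driven by ACTIONS_CACHE_SERVICE_V2.
  const restoredKey = await cache.restoreCache(paths, key, restoreKeys)
  if (!restoredKey) {
    // ... install dependencies, then save under the primary key
    await cache.saveCache(paths, key)
  }
}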

View File

@@ -3,15 +3,15 @@ import * as core from '@actions/core'
 import {
   BlobClient,
   BlockBlobClient,
-  BlobDownloadOptions,
+  BlobDownloadOptions
 } from '@azure/storage-blob'
 export async function DownloadCacheFile(
   signedUploadURL: string,
-  archivePath: string,
+  archivePath: string
 ): Promise<{}> {
   const downloadOptions: BlobDownloadOptions = {
-    maxRetryRequests: 5,
+    maxRetryRequests: 5
   }
   // TODO: tighten the configuration and pass the appropriate user-agent
@@ -21,5 +21,10 @@ export async function DownloadCacheFile(
   core.debug(`BlobClient: ${JSON.stringify(blobClient)}`)
   core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`)
-  return blockBlobClient.downloadToFile(archivePath, 0, undefined, downloadOptions)
+  return blockBlobClient.downloadToFile(
+    archivePath,
+    0,
+    undefined,
+    downloadOptions
+  )
 }

View File

@@ -7,15 +7,15 @@ import {
 export async function UploadCacheFile(
   signedUploadURL: string,
-  archivePath: string,
+  archivePath: string
 ): Promise<{}> {
   // TODO: tighten the configuration and pass the appropriate user-agent
   // Specify data transfer options
   const uploadOptions: BlockBlobParallelUploadOptions = {
     blockSize: 4 * 1024 * 1024, // 4 MiB max block size
     concurrency: 4, // maximum number of parallel transfer workers
-    maxSingleShotSize: 8 * 1024 * 1024, // 8 MiB initial transfer size
-  };
+    maxSingleShotSize: 8 * 1024 * 1024 // 8 MiB initial transfer size
+  }
   const blobClient: BlobClient = new BlobClient(signedUploadURL)
   const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
@@ -23,5 +23,5 @@ export async function UploadCacheFile(
   core.debug(`BlobClient: ${JSON.stringify(blobClient)}`)
   core.debug(`blockBlobClient: ${JSON.stringify(blockBlobClient)}`)
-  return blockBlobClient.uploadFile(archivePath, uploadOptions);
+  return blockBlobClient.uploadFile(archivePath, uploadOptions)
 }
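
Taken together, the two blob helpers (DownloadCacheFile above and UploadCacheFile here) wrap the Azure BlockBlobClient around a signed URL issued by the cache service. A rough usage sketch, with placeholder SAS URLs standing in for the signedUploadUrl / signedDownloadUrl values returned by CreateCacheEntry and GetCacheEntryDownloadURL:

import {UploadCacheFile} from './upload-cache'
import {DownloadCacheFile} from './download-cache'

async function roundTrip(): Promise<void> {
  // Placeholder SAS URLs; the real values come from the cache service responses.
  const signedUploadUrl = 'https://account.blob.core.windows.net/container/blob?sig=placeholder'
  const signedDownloadUrl = signedUploadUrl

  await UploadCacheFile(signedUploadUrl, '/tmp/cache.tzst')
  await DownloadCacheFile(signedDownloadUrl, '/tmp/cache-restored.tzst')
}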

View File

@@ -1,12 +1,12 @@
 import * as core from '@actions/core'
-import { HttpClient } from '@actions/http-client'
+import {HttpClient} from '@actions/http-client'
-import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
+import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
 import {
   RequestOptions,
   TypedResponse
 } from '@actions/http-client/lib/interfaces'
 import * as fs from 'fs'
-import { URL } from 'url'
+import {URL} from 'url'
 import * as utils from './cacheUtils'
 import {
   ArtifactCacheEntry,
@@ -33,7 +33,7 @@ import {
   retryHttpClientResponse,
   retryTypedResponse
 } from './requestUtils'
-import { getCacheServiceURL } from './config'
+import {getCacheServiceURL} from './config'
 function getCacheApiUrl(resource: string): string {
   const baseUrl: string = getCacheServiceURL()
@@ -216,7 +216,8 @@ async function uploadChunk(
   end: number
 ): Promise<void> {
   core.debug(
-    `Uploading chunk of size ${end - start + 1
+    `Uploading chunk of size ${
+      end - start + 1
     } bytes at offset ${start} with content range: ${getContentRange(
       start,
       end
@@ -312,7 +313,7 @@ async function commitCache(
   cacheId: number,
   filesize: number
 ): Promise<TypedResponse<null>> {
-  const commitCacheRequest: CommitCacheRequest = { size: filesize }
+  const commitCacheRequest: CommitCacheRequest = {size: filesize}
   return await retryTypedResponse('commitCache', async () =>
     httpClient.postJson<null>(
       getCacheApiUrl(`caches/${cacheId.toString()}`),

View File

@@ -246,4 +246,4 @@ export function getBackendIdsFromToken(): BackendIds {
   }
   throw InvalidJwtError
 }

View File

@@ -7,17 +7,21 @@ export function getRuntimeToken(): string {
 }
 export function getCacheServiceVersion(): string {
-  return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1';
+  return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'
 }
 export function getCacheServiceURL(): string {
   const version = getCacheServiceVersion()
   switch (version) {
     case 'v1':
-      return process.env['ACTIONS_CACHE_URL'] || process.env['ACTIONS_RESULTS_URL'] || ""
+      return (
+        process.env['ACTIONS_CACHE_URL'] ||
+        process.env['ACTIONS_RESULTS_URL'] ||
+        ''
+      )
     case 'v2':
-      return process.env['ACTIONS_RESULTS_URL'] || ""
+      return process.env['ACTIONS_RESULTS_URL'] || ''
     default:
       throw new Error(`Unsupported cache service version: ${version}`)
   }
 }
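
A quick illustration of how the two config functions behave together (the URLs are placeholders; the environment variable names are the ones read above):

// With ACTIONS_CACHE_SERVICE_V2 unset, v1 is selected and the URL falls back
// from ACTIONS_CACHE_URL to ACTIONS_RESULTS_URL to ''.
process.env['ACTIONS_CACHE_URL'] = 'https://cache.example.internal' // placeholder
console.log(getCacheServiceVersion()) // 'v1'
console.log(getCacheServiceURL()) // 'https://cache.example.internal'

// Any non-empty value for the flag switches to v2, which reads only
// ACTIONS_RESULTS_URL.
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
process.env['ACTIONS_RESULTS_URL'] = 'https://results.example.internal' // placeholder
console.log(getCacheServiceVersion()) // 'v2'
console.log(getCacheServiceURL()) // 'https://results.example.internal'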

View File

@@ -37,4 +37,4 @@ export const TarFilename = 'cache.tar'
 export const ManifestFilename = 'manifest.txt'
 export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository

View File

@@ -1,202 +1,203 @@
-import { info, debug } from '@actions/core'
+import {info, debug} from '@actions/core'
-import { getUserAgentString } from './user-agent'
+import {getUserAgentString} from './user-agent'
-import { NetworkError, UsageError } from './errors'
+import {NetworkError, UsageError} from './errors'
-import { getRuntimeToken, getCacheServiceURL } from '../config'
+import {getRuntimeToken, getCacheServiceURL} from '../config'
-import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
+import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
-import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client'
+import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
-import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp'
+import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp'
 // The twirp http client must implement this interface
 interface Rpc {
   request(
     service: string,
     method: string,
     contentType: 'application/json' | 'application/protobuf',
     data: object | Uint8Array
   ): Promise<object | Uint8Array>
 }
 /**
  * This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
  *
  * It adds retry logic to the request method, which is not present in the generated client.
  *
  * This class is used to interact with cache service v2.
  */
 class CacheServiceClient implements Rpc {
   private httpClient: HttpClient
   private baseUrl: string
   private maxAttempts = 5
   private baseRetryIntervalMilliseconds = 3000
   private retryMultiplier = 1.5
   constructor(
     userAgent: string,
     maxAttempts?: number,
     baseRetryIntervalMilliseconds?: number,
     retryMultiplier?: number
   ) {
     const token = getRuntimeToken()
     this.baseUrl = getCacheServiceURL()
     if (maxAttempts) {
       this.maxAttempts = maxAttempts
     }
     if (baseRetryIntervalMilliseconds) {
       this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
     }
     if (retryMultiplier) {
       this.retryMultiplier = retryMultiplier
     }
     this.httpClient = new HttpClient(userAgent, [
       new BearerCredentialHandler(token)
     ])
   }
   // This function satisfies the Rpc interface. It is compatible with the JSON
   // JSON generated client.
   async request(
     service: string,
     method: string,
     contentType: 'application/json' | 'application/protobuf',
     data: object | Uint8Array
   ): Promise<object | Uint8Array> {
     const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
     debug(`[Request] ${method} ${url}`)
     const headers = {
       'Content-Type': contentType
     }
     try {
       const {body} = await this.retryableRequest(async () =>
         this.httpClient.post(url, JSON.stringify(data), headers)
       )
       return body
     } catch (error) {
       throw new Error(`Failed to ${method}: ${error.message}`)
     }
   }
   async retryableRequest(
     operation: () => Promise<HttpClientResponse>
   ): Promise<{response: HttpClientResponse; body: object}> {
     let attempt = 0
     let errorMessage = ''
     let rawBody = ''
     while (attempt < this.maxAttempts) {
       let isRetryable = false
       try {
         const response = await operation()
         const statusCode = response.message.statusCode
         rawBody = await response.readBody()
         debug(`[Response] - ${response.message.statusCode}`)
         debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
         const body = JSON.parse(rawBody)
         debug(`Body: ${JSON.stringify(body, null, 2)}`)
         if (this.isSuccessStatusCode(statusCode)) {
           return {response, body}
         }
         isRetryable = this.isRetryableHttpStatusCode(statusCode)
         errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
         if (body.msg) {
           if (UsageError.isUsageErrorMessage(body.msg)) {
             throw new UsageError()
           }
           errorMessage = `${errorMessage}: ${body.msg}`
         }
       } catch (error) {
         if (error instanceof SyntaxError) {
           debug(`Raw Body: ${rawBody}`)
         }
         if (error instanceof UsageError) {
           throw error
         }
         if (NetworkError.isNetworkErrorCode(error?.code)) {
           throw new NetworkError(error?.code)
         }
         isRetryable = true
         errorMessage = error.message
       }
       if (!isRetryable) {
         throw new Error(`Received non-retryable error: ${errorMessage}`)
       }
       if (attempt + 1 === this.maxAttempts) {
         throw new Error(
           `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
         )
       }
       const retryTimeMilliseconds =
         this.getExponentialRetryTimeMilliseconds(attempt)
       info(
         `Attempt ${attempt + 1} of ${
           this.maxAttempts
         } failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
       )
       await this.sleep(retryTimeMilliseconds)
       attempt++
     }
     throw new Error(`Request failed`)
   }
   isSuccessStatusCode(statusCode?: number): boolean {
     if (!statusCode) return false
     return statusCode >= 200 && statusCode < 300
   }
   isRetryableHttpStatusCode(statusCode?: number): boolean {
     if (!statusCode) return false
     const retryableStatusCodes = [
       HttpCodes.BadGateway,
       HttpCodes.GatewayTimeout,
       HttpCodes.InternalServerError,
       HttpCodes.ServiceUnavailable,
       HttpCodes.TooManyRequests
     ]
     return retryableStatusCodes.includes(statusCode)
   }
   async sleep(milliseconds: number): Promise<void> {
     return new Promise(resolve => setTimeout(resolve, milliseconds))
   }
   getExponentialRetryTimeMilliseconds(attempt: number): number {
     if (attempt < 0) {
       throw new Error('attempt should be a positive integer')
     }
     if (attempt === 0) {
       return this.baseRetryIntervalMilliseconds
     }
     const minTime =
       this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
     const maxTime = minTime * this.retryMultiplier
     // returns a random number between minTime and maxTime (exclusive)
     return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
   }
 }
 export function internalCacheTwirpClient(options?: {
   maxAttempts?: number
   retryIntervalMs?: number
   retryMultiplier?: number
 }): CacheServiceClientJSON {
   const client = new CacheServiceClient(
     getUserAgentString(),
     options?.maxAttempts,
     options?.retryIntervalMs,
     options?.retryMultiplier
   )
   return new CacheServiceClientJSON(client)
 }
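
For context, the retry delay used by retryableRequest grows geometrically with jitter; a small worked example under the defaults shown in the class (base interval 3000 ms, multiplier 1.5, maxAttempts 5):

// Mirrors getExponentialRetryTimeMilliseconds() with the default settings.
const base = 3000
const multiplier = 1.5

function retryWindowMs(attempt: number): [number, number] {
  if (attempt === 0) return [base, base]
  const minTime = base * multiplier ** attempt
  return [minTime, minTime * multiplier] // the actual delay is random within this window
}

// attempt 1 -> [4500, 6750] ms, attempt 2 -> [6750, 10125] ms,
// attempt 3 -> [10125, 15187.5] ms; after 5 failed attempts the request errors out.
console.log([1, 2, 3].map(retryWindowMs))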

View File

@@ -1,72 +1,72 @@
 export class FilesNotFoundError extends Error {
   files: string[]
   constructor(files: string[] = []) {
     let message = 'No files were found to upload'
     if (files.length > 0) {
       message += `: ${files.join(', ')}`
     }
     super(message)
     this.files = files
     this.name = 'FilesNotFoundError'
   }
 }
 export class InvalidResponseError extends Error {
   constructor(message: string) {
     super(message)
     this.name = 'InvalidResponseError'
   }
 }
 export class CacheNotFoundError extends Error {
   constructor(message = 'Cache not found') {
     super(message)
     this.name = 'CacheNotFoundError'
   }
 }
 export class GHESNotSupportedError extends Error {
   constructor(
     message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.'
   ) {
     super(message)
     this.name = 'GHESNotSupportedError'
   }
 }
 export class NetworkError extends Error {
   code: string
   constructor(code: string) {
     const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
     super(message)
     this.code = code
     this.name = 'NetworkError'
   }
   static isNetworkErrorCode = (code?: string): boolean => {
     if (!code) return false
     return [
       'ECONNRESET',
       'ENOTFOUND',
       'ETIMEDOUT',
       'ECONNREFUSED',
       'EHOSTUNREACH'
     ].includes(code)
   }
 }
 export class UsageError extends Error {
   constructor() {
     const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
     super(message)
     this.name = 'UsageError'
   }
   static isUsageErrorMessage = (msg?: string): boolean => {
     if (!msg) return false
     return msg.includes('insufficient usage')
   }
 }

View File

@@ -5,5 +5,5 @@ const packageJson = require('../../../package.json')
  * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
  */
 export function getUserAgentString(): string {
   return `@actions/cache-${packageJson.version}`
 }