import * as core from '@actions/core'
import { HttpClient } from '@actions/http-client'
import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
import {
  RequestOptions,
  TypedResponse
} from '@actions/http-client/lib/interfaces'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as utils from './cacheUtils'
import { CompressionMethod } from './constants'
import {
  ArtifactCacheEntry,
  InternalCacheOptions,
  CommitCacheRequest,
  ReserveCacheRequest,
  ReserveCacheResponse,
  ITypedResponseWithError,
  ArtifactCacheList,
  InternalS3CompletedPart,
  CommitCacheResponse
} from './contracts'
import { downloadCacheMultiConnection } from './downloadUtils'
import { isSuccessStatusCode, retry, retryTypedResponse } from './requestUtils'
import axios, { AxiosError } from 'axios'

const versionSalt = '1.0'
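
/**
 * Builds the full cache service URL for a resource, e.g. 'cache/reserve'
 * becomes `${baseUrl}/v1/cache/reserve`.
 */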
function getCacheApiUrl(resource: string): string {
  const baseUrl: string =
    process.env['ACTIONS_CACHE_URL'] ?? 'http://127.0.0.1:8002'
  if (!baseUrl) {
    throw new Error('Cache Service Url not found, unable to restore cache.')
  }
  const url = `${baseUrl}/v1/${resource}`
  core.debug(`Resource Url: ${url}`)
  return url
}

function createAcceptHeader(type: string, apiVersion: string): string {
  return `${type};api-version=${apiVersion}`
}

function getRequestOptions(): RequestOptions {
  const requestOptions: RequestOptions = {
    headers: {
      Accept: createAcceptHeader('application/json', 'v1')
    }
  }
  return requestOptions
}
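
/**
 * Creates an HttpClient that authenticates with the WARP_ACTION_TOKEN bearer
 * token and sends the JSON Accept header from getRequestOptions().
 */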
function createHttpClient(): HttpClient {
  const token = process.env['WARP_ACTION_TOKEN'] ?? ''
  const bearerCredentialHandler = new BearerCredentialHandler(token)
  return new HttpClient(
    'actions/cache',
    [bearerCredentialHandler],
    getRequestOptions()
  )
}
export function getCacheVersion(
  paths: string[],
  compressionMethod?: CompressionMethod,
  enableCrossOsArchive = false
): string {
  const components = paths
  // Add compression method to cache version to restore
  // compressed cache as per compression method
  if (compressionMethod) {
    components.push(compressionMethod)
  }
  // Only check for windows platforms if enableCrossOsArchive is false
  if (process.platform === 'win32' && !enableCrossOsArchive) {
    components.push('windows-only')
  }
  // Add salt to cache version to support breaking changes in cache entry
  components.push(versionSalt)
  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}
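
/**
 * Looks up a cache entry matching the given keys and version. Resolves to
 * null on a cache miss (HTTP 204) and throws on any other non-success status.
 */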
export async function getCacheEntry(
  keys: string[],
  paths: string[],
  options?: InternalCacheOptions
): Promise<ArtifactCacheEntry | null> {
  const httpClient = createHttpClient()
  const version = getCacheVersion(
    paths,
    options?.compressionMethod,
    options?.enableCrossOsArchive
  )
  const resource = `cache?keys=${encodeURIComponent(
    keys.join(',')
  )}&version=${version}`
  const response = await retryTypedResponse('getCacheEntry', async () =>
    httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
  )
  // Cache not found
  if (response.statusCode === 204) {
    // List cache for primary key only if cache miss occurs
    if (core.isDebug()) {
      await printCachesListForDiagnostics(keys[0], httpClient, version)
    }
    return null
  }
  if (!isSuccessStatusCode(response.statusCode)) {
    throw new Error(`Cache service responded with ${response.statusCode}`)
  }
  const cacheResult = response.result
  const cacheDownloadUrl = cacheResult?.pre_signed_url
  if (!cacheDownloadUrl) {
    // Cache download URL (pre_signed_url) not found. This should never happen, so bail out.
    throw new Error('Cache not found.')
  }
  core.setSecret(cacheDownloadUrl)
  core.debug(`Cache Result:`)
  core.debug(JSON.stringify(cacheResult))
  return cacheResult
}
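
/**
 * Debug-only diagnostics on a cache miss: lists caches stored under the
 * primary key so version or scope mismatches show up in the logs.
 */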
async function printCachesListForDiagnostics(
  key: string,
  httpClient: HttpClient,
  version: string
): Promise<void> {
  const resource = `caches?key=${encodeURIComponent(key)}`
  const response = await retryTypedResponse('listCache', async () =>
    httpClient.getJson<ArtifactCacheList>(getCacheApiUrl(resource))
  )
  if (response.statusCode === 200) {
    const cacheListResult = response.result
    const totalCount = cacheListResult?.totalCount
    if (totalCount && totalCount > 0) {
      core.debug(
        `No matching cache found for cache key '${key}', version '${version}' and scope '${process.env['GITHUB_REF']}'. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`
      )
      for (const cacheEntry of cacheListResult?.artifactCaches ?? []) {
        core.debug(
          `Cache Key: ${cacheEntry?.cache_key}, Cache Version: ${cacheEntry?.cache_version}`
        )
      }
    }
  }
}
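
/**
 * Downloads the cache archive from its pre-signed URL; 8 is this client's
 * default connection count for the multi-connection download.
 */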
export async function downloadCache(
  archiveLocation: string,
  archivePath: string
): Promise<void> {
  await downloadCacheMultiConnection(archiveLocation, archivePath, 8)
}

// Reserve Cache
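/**
 * Reserves a cache entry for a multipart upload. The request carries the
 * number of chunks so the service can hand back matching pre-signed upload
 * URLs (how the response exposes them is defined by ReserveCacheResponse).
 */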
export async function reserveCache(
  cacheKey: string,
  numberOfChunks: number,
  options?: InternalCacheOptions
): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
  const httpClient = createHttpClient()
  const reserveCacheRequest: ReserveCacheRequest = {
    cache_key: cacheKey,
    number_of_chunks: numberOfChunks,
    content_type: 'application/zstd'
  }
  const response = await retryTypedResponse('reserveCache', async () =>
    httpClient.postJson<ReserveCacheResponse>(
      getCacheApiUrl('cache/reserve'),
      reserveCacheRequest
    )
  )
  return response
}

function getContentRange(start: number, end: number): string {
  // Format: `bytes start-end/filesize`
  // start and end are inclusive
  // filesize can be *
  // For a 200 byte chunk starting at byte 0:
  // Content-Range: bytes 0-199/*
  return `bytes ${start}-${end}/*`
}
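
/**
 * PUTs a single chunk to its pre-signed S3 URL and returns the completed part
 * (part number plus ETag) needed later to commit the multipart upload.
 */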
async function uploadChunk(
  resourceUrl: string,
  openStream: () => NodeJS.ReadableStream,
  partNumber: number,
  start: number,
  end: number
): Promise<InternalS3CompletedPart> {
  core.debug(
    `Uploading chunk of size ${
      end - start + 1
    } bytes at offset ${start} with content range: ${getContentRange(
      start,
      end
    )}`
  )
  // Manually convert the readable stream to a buffer. S3 doesn't allow stream as input
  const chunks = await utils.streamToBuffer(openStream())
  try {
    // HACK: Using axios here as S3 API doesn't allow readable stream as input and Github's HTTP client is not able to send buffer as body
    const response = await axios.request({
      method: 'PUT',
      url: resourceUrl,
      headers: {
        'Content-Type': 'application/octet-stream'
      },
      data: chunks
    })
    return {
      ETag: response.headers.etag ?? '',
      PartNumber: partNumber
    }
  } catch (error) {
    throw new Error(
      `Cache service responded with ${
        (error as AxiosError).response?.status
      } during upload chunk.`
    )
  }
}
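
/**
 * Splits the archive into one chunk per pre-signed URL and uploads them in
 * parallel from a shared file descriptor. Chunk size is ceil(fileSize /
 * numberOfChunks), so e.g. a 10 MiB file with 4 URLs uploads four ~2.5 MiB
 * parts, with byte ranges [0, chunkSize - 1], [chunkSize, 2 * chunkSize - 1], etc.
 */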
async function uploadFileToS3(
  preSignedURLs: string[],
  archivePath: string
): Promise<InternalS3CompletedPart[]> {
  // Upload Chunks
  const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
  const numberOfChunks = preSignedURLs.length
2023-11-22 12:12:10 +00:00
const fd = fs . openSync ( archivePath , 'r' )
2023-11-17 06:45:51 +00:00
core . debug ( 'Awaiting all uploads' )
let offset = 0
try {
    const completedParts = await Promise.all(
      preSignedURLs.map(async (presignedURL, index) => {
        const chunkSize = Math.ceil(fileSize / numberOfChunks)
        const start = offset
        const end = offset + chunkSize - 1
        offset += chunkSize
        return await uploadChunk(
          presignedURL,
          () =>
            fs
              .createReadStream(archivePath, {
                fd,
                start,
                end,
                autoClose: false
              })
              .on('error', error => {
                throw new Error(
                  `Cache upload failed because file read failed with ${error.message}`
                )
              }),
          index + 1,
          start,
          end
        )
      })
    )
    return completedParts
  } finally {
    fs.closeSync(fd)
  }
}
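
/**
 * Finalizes the multipart upload: posts the completed parts with the upload
 * key/ID so the service can assemble the parts into the stored cache entry.
 */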
async function commitCache(
  httpClient: HttpClient,
  cacheKey: string,
  cacheVersion: string,
  uploadKey: string,
  uploadID: string,
  parts: InternalS3CompletedPart[]
): Promise<TypedResponse<CommitCacheResponse>> {
  const commitCacheRequest: CommitCacheRequest = {
    cache_key: cacheKey,
    cache_version: cacheVersion,
    upload_key: uploadKey,
    upload_id: uploadID,
    parts,
    os: process.env['RUNNER_OS'] ?? 'Linux',
    vcs_type: 'github'
  }
  return await retryTypedResponse('commitCache', async () =>
    httpClient.postJson<CommitCacheResponse>(
      getCacheApiUrl('cache/commit'),
      commitCacheRequest
    )
  )
}
export async function saveCache(
  cacheKey: string,
  cacheVersion: string,
  uploadId: string,
  uploadKey: string,
  numberOfChunks: number,
  preSignedURLs: string[],
  archivePath: string
): Promise<string> {
  // Number of chunks should match the number of pre-signed URLs
  if (numberOfChunks !== preSignedURLs.length) {
    throw new Error(
      `Number of chunks (${numberOfChunks}) should match the number of pre-signed URLs (${preSignedURLs.length}).`
    )
  }
  const httpClient = createHttpClient()
  core.debug('Upload cache')
  const completedParts = await uploadFileToS3(preSignedURLs, archivePath)
  // Sort parts in ascending order by partNumber
  completedParts.sort((a, b) => a.PartNumber - b.PartNumber)
  // Commit Cache
  core.debug('Committing cache')
  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
  core.info(
    `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
  )
  const commitCacheResponse = await commitCache(
    httpClient,
    cacheKey,
    cacheVersion,
    uploadKey,
    uploadId,
    completedParts
  )
  if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
    throw new Error(
      `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
    )
  }
  core.info('Cache saved successfully')
  return commitCacheResponse.result?.cache_key ?? ''
}
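
/**
 * Deletes the cache entries stored under the given keys and throws if the
 * service responds with a non-success status code.
 */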
export async function deleteCache(keys: string[]) {
  const httpClient = createHttpClient()
  const resource = `cache?keys=${encodeURIComponent(keys.join(','))}`
  const response = await httpClient.del(getCacheApiUrl(resource))
  if (!isSuccessStatusCode(response.message.statusCode)) {
    throw new Error(
      `Cache service responded with ${response.message.statusCode}`
    )
  }
}