
Merge pull request #5 from WarpBuilds/feat-add-streaming-download-s3

Feat add streaming download s3
pull/1935/head
Prajjwal 2024-06-03 17:40:24 +05:30 committed by GitHub
commit 5ee82ffa13
3 changed files with 51 additions and 22 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "github-actions.warp-cache",
"version": "1.1.12",
"version": "1.1.15",
"preview": true,
"description": "Github action to use WarpBuild's in-house cache offering",
"keywords": [

View File

@@ -123,7 +123,7 @@ export async function restoreCache(
)
core.debug(`Archive Path: ${archivePath}`)
let cacheKey: string = ''
let cacheKey = cacheEntry?.cache_entry?.cache_user_given_key ?? primaryKey
switch (cacheEntry.provider) {
case 's3': {
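Note on the cacheKey change above: the key reported back to the caller is now resolved once, up front, as the user-given key stored with the cache entry, falling back to the primary key the restore was requested with. A minimal sketch of that resolution, assuming a hypothetical entry shape (the real response type is defined elsewhere in this repo):

// Hypothetical shape; the actual cache entry type lives in this repo.
interface CacheEntrySketch {
  cache_entry?: {cache_user_given_key?: string}
}

// Prefer the key the entry was originally saved under; otherwise fall back
// to the primary key this restore was requested with.
function resolveCacheKey(entry: CacheEntrySketch, primaryKey: string): string {
  return entry.cache_entry?.cache_user_given_key ?? primaryKey
}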
@@ -131,18 +131,40 @@ export async function restoreCache(
return undefined
}
cacheKey = cacheEntry.s3.pre_signed_url
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return cacheKey
}
try {
let readStream: NodeJS.ReadableStream | undefined
let downloadCommandPipe = getDownloadCommandPipeForWget(
cacheEntry?.s3?.pre_signed_url
)
await extractStreamingTar(
readStream,
archivePath,
compressionMethod,
downloadCommandPipe
)
} catch (error) {
core.debug(`Failed to download cache: ${error}`)
core.info(
`Streaming download failed. Likely a cloud provider issue. Retrying with multipart download`
)
// Wait 1 second
await new Promise(resolve => setTimeout(resolve, 1000))
try {
await cacheHttpClient.downloadCache(
cacheEntry.provider,
cacheEntry.s3?.pre_signed_url,
archivePath
)
} catch (error) {
core.info('Cache Miss. Failed to download cache.')
return undefined
}
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
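The heart of this change is the streaming fast path above: rather than downloading the whole archive to disk and then extracting it, the pre-signed URL is fetched with wget and piped straight into tar, so extraction begins while the download is still in flight. A minimal, self-contained sketch of that pattern, assuming wget and tar are on PATH and the destination directory exists (the repo's own getDownloadCommandPipeForWget and extractStreamingTar also handle compression-method selection and an optional read stream):

import {spawn} from 'child_process'
import {pipeline} from 'stream/promises'

async function streamingRestoreSketch(
  preSignedUrl: string,
  destDir: string
): Promise<void> {
  // wget -q -O - writes the archive bytes to stdout instead of a file.
  const wget = spawn('wget', ['-q', '-O', '-', preSignedUrl], {
    stdio: ['ignore', 'pipe', 'inherit']
  })
  // tar reads from stdin, so the archive is never fully buffered on disk.
  const tar = spawn('tar', ['-xz', '-C', destDir], {
    stdio: ['pipe', 'inherit', 'inherit']
  })
  // Forward wget's stdout into tar's stdin, propagating stream errors.
  await pipeline(wget.stdout!, tar.stdin!)
  // pipeline resolves when tar's stdin closes; still wait for tar to exit
  // so a corrupt or truncated archive surfaces as an error.
  await new Promise<void>((resolve, reject) => {
    tar.on('close', code =>
      code === 0 ? resolve() : reject(new Error(`tar exited with code ${code}`))
    )
  })
}

A rejection anywhere in that chain is what lets the catch block above wait one second and retry with the slower multipart download before giving up and treating the restore as a cache miss.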
@@ -156,6 +178,8 @@ export async function restoreCache(
)
await extractTar(archivePath, compressionMethod)
}
core.info('Cache restored successfully')
break
}
@@ -164,7 +188,6 @@ export async function restoreCache(
if (!cacheEntry.gcs?.cache_key) {
return undefined
}
cacheKey = cacheEntry.gcs?.cache_key
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
@@ -255,7 +278,7 @@ export async function restoreCache(
}
}
return cacheEntry?.cache_entry?.cache_user_given_key ?? cacheKey
return cacheKey
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
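Because cacheKey is now initialized from the entry's user-given key (falling back to primaryKey) at the top of the function, the final return simplifies to return cacheKey, and the S3 and GCS branches report the same resolved key. A hypothetical usage sketch, with invented paths and keys, assuming this module's restoreCache export and @actions/core are imported as in the repo:

async function runRestoreSketch(): Promise<void> {
  const restoredKey = await restoreCache(
    ['/home/runner/.npm'], // hypothetical path to restore
    'npm-linux-abc123' // hypothetical primary key
  )
  // On a hit this is the user-given key when one was recorded, otherwise
  // the primary key; on a miss it is undefined.
  core.info(restoredKey ? `Cache restored with key ${restoredKey}` : 'Cache miss')
}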
@@ -343,7 +366,10 @@ export async function saveCache(
// Calculate number of chunks required. This is only required if backend is S3 as Google Cloud SDK will do it for us
const uploadOptions = getUploadOptions()
const maxChunkSize = uploadOptions?.uploadChunkSize ?? 32 * 1024 * 1024 // Default 32MB
const numberOfChunks = Math.floor(archiveFileSize / maxChunkSize)
const numberOfChunks = Math.max(
Math.floor(archiveFileSize / maxChunkSize),
1
)
const reserveCacheResponse = await cacheHttpClient.reserveCache(
key,
numberOfChunks,
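The Math.max guard above fixes an edge case in chunk counting: any archive smaller than one chunk used to produce a count of zero, so the reservation request would ask for zero parts. A quick worked example with the default 32 MB chunk size:

// Worked example of the chunk-count fix.
const maxChunkSize = 32 * 1024 * 1024 // default 32MB, as above
const archiveFileSize = 10 * 1024 * 1024 // a 10MB archive
// Before: Math.floor(10MB / 32MB) === 0, so zero chunks were reserved.
// After: Math.max(0, 1) === 1, so small archives still get one part.
const numberOfChunks = Math.max(Math.floor(archiveFileSize / maxChunkSize), 1)
console.log(numberOfChunks) // 1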

View File

@@ -379,6 +379,9 @@ export async function saveCache(
!S3UploadId ||
!S3UploadKey
) {
core.debug(
`S3 params are not set. Number of Chunks: ${S3NumberOfChunks}, PreSigned URLs: ${S3PreSignedURLs}, Upload ID: ${S3UploadId}, Upload Key: ${S3UploadKey}`
)
throw new Error(
'Unable to upload cache to S3. One of the following required parameters is missing: numberOfChunks, preSignedURLs, uploadId, uploadKey.'
)
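The new core.debug call above records the actual parameter values before the generic error is thrown; the debug line only appears when step debug logging (ACTIONS_STEP_DEBUG) is enabled. A minimal sketch of that guard pattern, with hypothetical parameter names standing in for the surrounding saveCache locals:

import * as core from '@actions/core'

// Log the suspect inputs at debug level, then throw a stable,
// user-facing error for the normal log.
function assertS3UploadParams(
  numberOfChunks?: number,
  preSignedURLs?: string[],
  uploadId?: string,
  uploadKey?: string
): void {
  if (!numberOfChunks || !preSignedURLs?.length || !uploadId || !uploadKey) {
    core.debug(
      `S3 params are not set. Number of Chunks: ${numberOfChunks}, PreSigned URLs: ${preSignedURLs}, Upload ID: ${uploadId}, Upload Key: ${uploadKey}`
    )
    throw new Error(
      'Unable to upload cache to S3. One of the following required parameters is missing: numberOfChunks, preSignedURLs, uploadId, uploadKey.'
    )
  }
}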