mirror of https://github.com/actions/toolkit
Merge pull request #5 from WarpBuilds/feat-add-streaming-download-s3

Feat add streaming download s3 (pull/1935/head)

commit 5ee82ffa13
@@ -1,6 +1,6 @@
 {
   "name": "github-actions.warp-cache",
-  "version": "1.1.12",
+  "version": "1.1.15",
   "preview": true,
   "description": "Github action to use WarpBuild's in-house cache offering",
   "keywords": [
@@ -123,7 +123,7 @@ export async function restoreCache(
     )
     core.debug(`Archive Path: ${archivePath}`)

-    let cacheKey: string = ''
+    let cacheKey = cacheEntry?.cache_entry?.cache_user_given_key ?? primaryKey

     switch (cacheEntry.provider) {
       case 's3': {
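
Note on the key-resolution change above: cacheKey is now seeded once, preferring the backend's user-given key and falling back to primaryKey, instead of being reassigned inside each provider branch (the matching simplification of the return site appears in a later hunk). A minimal sketch of the resolution order, with the entry shape assumed from the diff rather than taken from the actual warp-cache types:

// Hypothetical shape inferred from the diff; not the real warp-cache types.
interface CacheEntry {
  cache_entry?: {cache_user_given_key?: string}
}

function resolveCacheKey(
  entry: CacheEntry | undefined,
  primaryKey: string
): string {
  // `??` only falls back on null/undefined, so an empty-string key
  // returned by the backend would still win over primaryKey.
  return entry?.cache_entry?.cache_user_given_key ?? primaryKey
}
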
@ -131,31 +131,55 @@ export async function restoreCache(
|
||||||
return undefined
|
return undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
cacheKey = cacheEntry.s3.pre_signed_url
|
|
||||||
|
|
||||||
if (options?.lookupOnly) {
|
if (options?.lookupOnly) {
|
||||||
core.info('Lookup only - skipping download')
|
core.info('Lookup only - skipping download')
|
||||||
return cacheKey
|
return cacheKey
|
||||||
}
|
}
|
||||||
|
|
||||||
await cacheHttpClient.downloadCache(
|
try {
|
||||||
cacheEntry.provider,
|
let readStream: NodeJS.ReadableStream | undefined
|
||||||
cacheEntry.s3?.pre_signed_url,
|
let downloadCommandPipe = getDownloadCommandPipeForWget(
|
||||||
archivePath
|
cacheEntry?.s3?.pre_signed_url
|
||||||
)
|
)
|
||||||
|
await extractStreamingTar(
|
||||||
|
readStream,
|
||||||
|
archivePath,
|
||||||
|
compressionMethod,
|
||||||
|
downloadCommandPipe
|
||||||
|
)
|
||||||
|
} catch (error) {
|
||||||
|
core.debug(`Failed to download cache: ${error}`)
|
||||||
|
core.info(
|
||||||
|
`Streaming download failed. Likely a cloud provider issue. Retrying with multipart download`
|
||||||
|
)
|
||||||
|
// Wait 1 second
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000))
|
||||||
|
|
||||||
if (core.isDebug()) {
|
try {
|
||||||
await listTar(archivePath, compressionMethod)
|
await cacheHttpClient.downloadCache(
|
||||||
|
cacheEntry.provider,
|
||||||
|
cacheEntry.s3?.pre_signed_url,
|
||||||
|
archivePath
|
||||||
|
)
|
||||||
|
} catch (error) {
|
||||||
|
core.info('Cache Miss. Failed to download cache.')
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
if (core.isDebug()) {
|
||||||
|
await listTar(archivePath, compressionMethod)
|
||||||
|
}
|
||||||
|
|
||||||
|
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
|
||||||
|
core.info(
|
||||||
|
`Cache Size: ~${Math.round(
|
||||||
|
archiveFileSize / (1024 * 1024)
|
||||||
|
)} MB (${archiveFileSize} B)`
|
||||||
|
)
|
||||||
|
|
||||||
|
await extractTar(archivePath, compressionMethod)
|
||||||
}
|
}
|
||||||
|
|
||||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
|
|
||||||
core.info(
|
|
||||||
`Cache Size: ~${Math.round(
|
|
||||||
archiveFileSize / (1024 * 1024)
|
|
||||||
)} MB (${archiveFileSize} B)`
|
|
||||||
)
|
|
||||||
|
|
||||||
await extractTar(archivePath, compressionMethod)
|
|
||||||
core.info('Cache restored successfully')
|
core.info('Cache restored successfully')
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
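
For context on the new streaming path: instead of writing the full archive to disk and then extracting it, the download is piped straight into tar via a command pipe built from the pre-signed URL, with the old multipart download kept as a fallback. The sketch below shows one way such a wget-to-tar pipe can be wired; getDownloadCommandPipeForWget and extractStreamingTar are real names from this diff, but the signatures, helper names, and wget/tar flags here are assumptions for illustration, not the repo's implementation.

import {spawn, ChildProcess} from 'child_process'

// Stream the pre-signed URL to stdout: -q (quiet), -O - (body to stdout).
function downloadPipeForWget(preSignedUrl: string): ChildProcess {
  return spawn('wget', ['-q', '-O', '-', preSignedUrl])
}

// Pipe the download into tar so extraction overlaps with the transfer.
async function streamingExtract(
  preSignedUrl: string,
  targetDir: string
): Promise<void> {
  const download = downloadPipeForWget(preSignedUrl)
  // -x (extract), -z (gunzip), -f - (archive from stdin), -C (target dir)
  const tar = spawn('tar', ['-xzf', '-', '-C', targetDir])
  download.stdout?.pipe(tar.stdin!)
  await new Promise<void>((resolve, reject) => {
    download.on('error', reject)
    tar.on('error', reject)
    tar.on('close', code =>
      code === 0 ? resolve() : reject(new Error(`tar exited with code ${code}`))
    )
  })
}

If the pipe breaks mid-stream or tar fails on a truncated archive, the promise rejects, which is what lets the catch branch in the hunk above fall back to cacheHttpClient.downloadCache plus extractTar.
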
@@ -164,7 +188,6 @@ export async function restoreCache(
         if (!cacheEntry.gcs?.cache_key) {
           return undefined
         }
-        cacheKey = cacheEntry.gcs?.cache_key

         if (options?.lookupOnly) {
           core.info('Lookup only - skipping download')
@@ -255,7 +278,7 @@ export async function restoreCache(
       }
     }

-    return cacheEntry?.cache_entry?.cache_user_given_key ?? cacheKey
+    return cacheKey
   } catch (error) {
     const typedError = error as Error
     if (typedError.name === ValidationError.name) {
@@ -343,7 +366,10 @@ export async function saveCache(
     // Calculate number of chunks required. This is only required if backend is S3 as Google Cloud SDK will do it for us
     const uploadOptions = getUploadOptions()
     const maxChunkSize = uploadOptions?.uploadChunkSize ?? 32 * 1024 * 1024 // Default 32MB
-    const numberOfChunks = Math.floor(archiveFileSize / maxChunkSize)
+    const numberOfChunks = Math.max(
+      Math.floor(archiveFileSize / maxChunkSize),
+      1
+    )
     const reserveCacheResponse = await cacheHttpClient.reserveCache(
       key,
       numberOfChunks,
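
Why the Math.max guard: with the default 32 MB chunk size, any archive smaller than one chunk used to produce a chunk count of zero. A quick worked example using the same constants as the diff:

const maxChunkSize = 32 * 1024 * 1024 // 32 MB, the default above

// 10 MB archive -- smaller than a single chunk:
const small = 10 * 1024 * 1024
console.log(Math.floor(small / maxChunkSize)) // 0: old code reserved zero chunks
console.log(Math.max(Math.floor(small / maxChunkSize), 1)) // 1: at least one chunk

// 80 MB archive -- unchanged by the fix:
const large = 80 * 1024 * 1024
console.log(Math.max(Math.floor(large / maxChunkSize), 1)) // 2
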
@@ -379,6 +405,9 @@ export async function saveCache(
       !S3UploadId ||
       !S3UploadKey
     ) {
+      core.debug(
+        `S3 params are not set. Number of Chunks: ${S3NumberOfChunks}, PreSigned URLs: ${S3PreSignedURLs}, Upload ID: ${S3UploadId}, Upload Key: ${S3UploadKey}`
+      )
       throw new Error(
         'Unable to upload cache to S3. One of the following required parameters is missing: numberOfChunks, preSignedURLs, uploadId, uploadKey.'
       )
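
A detail linking this hunk to the chunk-count fix above: the visible checks (!S3UploadId, !S3UploadKey) are truthiness tests, and the error message lists numberOfChunks among the required parameters. If the chunk count is checked the same way in the lines above this hunk, a reserved count of 0 reads as missing, which is exactly the value Math.max(..., 1) now rules out for small archives. A small illustration of that falsy edge case (the variable is stood in for here; the real guard lives in saveCache):

// 0 is falsy in JavaScript/TypeScript, so a zero chunk count would
// trip a `!value` guard exactly like an undefined one.
const S3NumberOfChunks: number | undefined = 0
console.log(!S3NumberOfChunks) // true -> treated as a missing parameter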