1
0
Fork 0

moves to streaming for gcs

pull/1935/head
Prajjwal 2024-04-15 16:18:16 +05:30
parent d0787ac29d
commit 63c6089651
4 changed files with 36 additions and 35 deletions

View File

@@ -1,6 +1,6 @@
{ {
"name": "github-actions.warp-cache", "name": "github-actions.warp-cache",
"version": "1.0.6", "version": "1.1.0",
"preview": true, "preview": true,
"description": "Github action to use WarpBuild's in-house cache offering", "description": "Github action to use WarpBuild's in-house cache offering",
"keywords": [ "keywords": [

View File

@@ -171,39 +171,39 @@ export async function restoreCache(
const archiveLocation = `gs://${cacheEntry.gcs?.bucket_name}/${cacheEntry.gcs?.cache_key}` const archiveLocation = `gs://${cacheEntry.gcs?.bucket_name}/${cacheEntry.gcs?.cache_key}`
await cacheHttpClient.downloadCache( // await cacheHttpClient.downloadCache(
cacheEntry.provider, // cacheEntry.provider,
archiveLocation, // archiveLocation,
archivePath, // archivePath,
cacheEntry.gcs?.short_lived_token?.access_token ?? '' // cacheEntry.gcs?.short_lived_token?.access_token ?? ''
) // )
if (core.isDebug()) { // if (core.isDebug()) {
await listTar(archivePath, compressionMethod) // await listTar(archivePath, compressionMethod)
} // }
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath) // const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info( // core.info(
`Cache Size: ~${Math.round( // `Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024) // archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)` // )} MB (${archiveFileSize} B)`
) // )
await extractTar(archivePath, compressionMethod) // await extractTar(archivePath, compressionMethod)
// For GCS, we do a streaming download which means that we extract the archive while we are downloading it. // For GCS, we do a streaming download which means that we extract the archive while we are downloading it.
// const readStream = cacheHttpClient.downloadCacheStreaming( const readStream = cacheHttpClient.downloadCacheStreaming(
// 'gcs', 'gcs',
// archiveLocation, archiveLocation,
// cacheEntry?.gcs?.short_lived_token?.access_token ?? '' cacheEntry?.gcs?.short_lived_token?.access_token ?? ''
// ) )
// if (!readStream) { if (!readStream) {
// return undefined return undefined
// } }
// await extractStreamingTar(readStream, archivePath, compressionMethod) await extractStreamingTar(readStream, archivePath, compressionMethod)
core.info('Cache restored successfully') core.info('Cache restored successfully')
break break
} }

View File

@@ -311,7 +311,8 @@ export async function downloadCacheMultipartGCP(
const transferManager = new TransferManager(storage.bucket(bucketName)) const transferManager = new TransferManager(storage.bucket(bucketName))
await transferManager.downloadFileInChunks(objectName, { await transferManager.downloadFileInChunks(objectName, {
destination: archivePath, destination: archivePath,
chunkSizeBytes: 8 * 1024 * 1024 // 8MB chunk size noReturnData: true,
chunkSizeBytes: 1024 * 1024 * 8
}) })
} catch (error) { } catch (error) {
core.debug(`Failed to download cache: ${error}`) core.debug(`Failed to download cache: ${error}`)

View File

@@ -31,14 +31,14 @@ process.env['GITHUB_REF'] = 'refs/heads/main'
// true // true
// ) // )
// restoreCache( restoreCache(
// ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'], ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],
// 'test-fs-local-key', 'test-fs-local-key',
// ['test-fs'], ['test-fs'],
// {}, {},
// true, true,
// false false
// ) )
// deleteCache( // deleteCache(
// ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'], // ['/Users/prajjwal/Repos/warpbuild/playground/test_fs'],