
Merge pull request #1882 from actions/enhance-blob-client

Enhance blob client resilience & performance
Bassem Dghaidi 2024-12-02 20:48:46 +01:00 committed by GitHub
commit a10e209c8d
11 changed files with 518 additions and 260 deletions

packages/cache/__tests__/options.test.ts

@@ -11,8 +11,6 @@ const downloadConcurrency = 8
 const timeoutInMs = 30000
 const segmentTimeoutInMs = 600000
 const lookupOnly = false
-const uploadConcurrency = 4
-const uploadChunkSize = 32 * 1024 * 1024
 
 test('getDownloadOptions sets defaults', async () => {
   const actualOptions = getDownloadOptions()
@@ -43,18 +41,21 @@ test('getDownloadOptions overrides all settings', async () => {
 })
 
 test('getUploadOptions sets defaults', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 4,
+    uploadChunkSize: 32 * 1024 * 1024,
+    useAzureSdk: false
+  }
   const actualOptions = getUploadOptions()
 
-  expect(actualOptions).toEqual({
-    uploadConcurrency,
-    uploadChunkSize
-  })
+  expect(actualOptions).toEqual(expectedOptions)
 })
 
 test('getUploadOptions overrides all settings', async () => {
   const expectedOptions: UploadOptions = {
     uploadConcurrency: 2,
-    uploadChunkSize: 16 * 1024 * 1024
+    uploadChunkSize: 16 * 1024 * 1024,
+    useAzureSdk: true
   }
 
   const actualOptions = getUploadOptions(expectedOptions)
@@ -62,6 +63,34 @@ test('getUploadOptions overrides all settings', async () => {
   expect(actualOptions).toEqual(expectedOptions)
 })
 
+test('env variables override all getUploadOptions settings', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 16,
+    uploadChunkSize: 64 * 1024 * 1024,
+    useAzureSdk: true
+  }
+
+  process.env.CACHE_UPLOAD_CONCURRENCY = '16'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
+
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
+test('env variables override all getUploadOptions settings but do not exceed caps', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 32,
+    uploadChunkSize: 128 * 1024 * 1024,
+    useAzureSdk: true
+  }
+
+  process.env.CACHE_UPLOAD_CONCURRENCY = '64'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
+
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
 test('getDownloadOptions overrides download timeout minutes', async () => {
   const expectedOptions: DownloadOptions = {
     useAzureSdk: false,

packages/cache/__tests__/restoreCacheV2.test.ts

@@ -3,11 +3,11 @@ import * as path from 'path'
 import * as tar from '../src/internal/tar'
 import * as config from '../src/internal/config'
 import * as cacheUtils from '../src/internal/cacheUtils'
-import * as downloadCacheModule from '../src/internal/blob/download-cache'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
 import {restoreCache} from '../src/cache'
 import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import {BlobDownloadResponseParsed} from '@azure/storage-blob'
+import {DownloadOptions} from '../src/options'
 
 jest.mock('../src/internal/cacheHttpClient')
 jest.mock('../src/internal/cacheUtils')
@@ -142,6 +142,7 @@ test('restore with gzip compressed cache found', async () => {
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+  const options = {useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -169,17 +170,7 @@ test('restore with gzip compressed cache found', async () => {
   })
 
   const archivePath = path.join(tempPath, CacheFilename.Gzip)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobDownloadResponseParsed)
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const fileSize = 142
   const getArchiveFileSizeInBytesMock = jest
@@ -189,7 +180,7 @@ test('restore with gzip compressed cache found', async () => {
   const extractTarMock = jest.spyOn(tar, 'extractTar')
   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key)
+  const cacheKey = await restoreCache(paths, key, [], options)
 
   expect(cacheKey).toBe(key)
   expect(getCacheVersionMock).toHaveBeenCalledWith(
@@ -203,9 +194,10 @@ test('restore with gzip compressed cache found', async () => {
     version: cacheVersion
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+  expect(downloadCacheMock).toHaveBeenCalledWith(
     signedDownloadUrl,
-    archivePath
+    archivePath,
+    options
   )
   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
@@ -226,6 +218,7 @@ test('restore with zstd compressed cache found', async () => {
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
+  const options = {useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -253,17 +246,7 @@ test('restore with zstd compressed cache found', async () => {
   })
 
   const archivePath = path.join(tempPath, CacheFilename.Zstd)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobDownloadResponseParsed)
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const fileSize = 62915000
   const getArchiveFileSizeInBytesMock = jest
@@ -273,7 +256,7 @@ test('restore with zstd compressed cache found', async () => {
   const extractTarMock = jest.spyOn(tar, 'extractTar')
   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key)
+  const cacheKey = await restoreCache(paths, key, [], options)
 
   expect(cacheKey).toBe(key)
   expect(getCacheVersionMock).toHaveBeenCalledWith(
@@ -287,9 +270,10 @@ test('restore with zstd compressed cache found', async () => {
     version: cacheVersion
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+  expect(downloadCacheMock).toHaveBeenCalledWith(
     signedDownloadUrl,
-    archivePath
+    archivePath,
+    options
   )
   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
@@ -311,6 +295,7 @@ test('restore with cache found for restore key', async () => {
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
+  const options = {useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -338,17 +323,7 @@ test('restore with cache found for restore key', async () => {
   })
 
   const archivePath = path.join(tempPath, CacheFilename.Gzip)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobDownloadResponseParsed)
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const fileSize = 142
   const getArchiveFileSizeInBytesMock = jest
@@ -358,7 +333,7 @@ test('restore with cache found for restore key', async () => {
   const extractTarMock = jest.spyOn(tar, 'extractTar')
   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key, restoreKeys)
+  const cacheKey = await restoreCache(paths, key, restoreKeys, options)
 
   expect(cacheKey).toBe(restoreKeys[0])
   expect(getCacheVersionMock).toHaveBeenCalledWith(
@@ -372,9 +347,10 @@ test('restore with cache found for restore key', async () => {
     version: cacheVersion
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+  expect(downloadCacheMock).toHaveBeenCalledWith(
     signedDownloadUrl,
-    archivePath
+    archivePath,
+    options
   )
   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
@@ -388,14 +364,14 @@ test('restore with cache found for restore key', async () => {
   expect(compressionMethodMock).toHaveBeenCalledTimes(1)
 })
 
-test('restore with dry run', async () => {
+test('restore with lookup only enabled', async () => {
   const paths = ['node_modules']
   const key = 'node-test'
-  const options = {lookupOnly: true}
   const compressionMethod = CompressionMethod.Gzip
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+  const options = {lookupOnly: true, useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -416,10 +392,7 @@ test('restore with dry run', async () => {
   )
 
   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const cacheKey = await restoreCache(paths, key, undefined, options)
 
@@ -438,5 +411,5 @@ test('restore with dry run', async () => {
   // creating a tempDir and downloading the cache are skipped
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
-  expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
+  expect(downloadCacheMock).toHaveBeenCalledTimes(0)
 })
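
Note: restoreCacheV2 forces useAzureSdk: true on its own (see the src/cache.ts hunk further down), so real callers never need to pass it; the options objects in these tests exist mainly so the assertions on the forwarded downloadCache arguments have something to match. A minimal, hypothetical call site in TypeScript (paths and keys are illustrative):

    import {restoreCache} from '@actions/cache'

    async function restore(): Promise<void> {
      // Only caller-relevant options (lookupOnly, timeouts, ...) matter here;
      // the v2 code path overrides useAzureSdk internally.
      const hitKey = await restoreCache(['node_modules'], 'node-test', ['node-'])
      console.log(hitKey ?? 'cache miss')
    }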

packages/cache/__tests__/saveCache.test.ts

@@ -270,7 +270,12 @@ test('save with server error should fail', async () => {
     compression
   )
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    cacheId,
+    archiveFile,
+    '',
+    undefined
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
@@ -315,7 +320,12 @@ test('save with valid inputs uploads a cache', async () => {
     compression
   )
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    cacheId,
+    archiveFile,
+    '',
+    undefined
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })

packages/cache/__tests__/saveCacheV2.test.ts

@@ -6,15 +6,14 @@ import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import * as config from '../src/internal/config'
 import * as tar from '../src/internal/tar'
 import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import * as uploadCacheModule from '../src/internal/blob/upload-cache'
-import {BlobUploadCommonResponse} from '@azure/storage-blob'
-import {InvalidResponseError} from '../src/internal/shared/errors'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
+import {UploadOptions} from '../src/options'
 
 let logDebugMock: jest.SpyInstance
 
 jest.mock('../src/internal/tar')
 
-let uploadFileMock = jest.fn()
+const uploadFileMock = jest.fn()
 const blockBlobClientMock = jest.fn().mockImplementation(() => ({
   uploadFile: uploadFileMock
 }))
@@ -116,15 +115,7 @@ test('create cache entry failure', async () => {
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
   const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
-  const uploadCacheFileMock = jest
-    .spyOn(uploadCacheModule, 'uploadCacheFile')
-    .mockReturnValueOnce(
-      Promise.resolve({
-        _response: {
-          status: 200
-        }
-      } as BlobUploadCommonResponse)
-    )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
 
   const cacheId = await saveCache(paths, key)
   expect(cacheId).toBe(-1)
@@ -139,15 +130,21 @@ test('create cache entry failure', async () => {
   expect(createTarMock).toHaveBeenCalledTimes(1)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
   expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0)
-  expect(uploadCacheFileMock).toHaveBeenCalledTimes(0)
+  expect(saveCacheMock).toHaveBeenCalledTimes(0)
 })
 
-test('finalize save cache failure', async () => {
+test('save cache fails if a signedUploadURL was not passed', async () => {
   const paths = 'node_modules'
   const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(paths)]
-  const logWarningMock = jest.spyOn(core, 'warning')
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
+  const signedUploadURL = ''
+  const archiveFileSize = 1024
+  const options: UploadOptions = {
+    archiveSizeBytes: archiveFileSize, // These should always match
+    useAzureSdk: true,
+    uploadChunkSize: 64 * 1024 * 1024,
+    uploadConcurrency: 8
+  }
 
   const createCacheEntryMock = jest
     .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
@@ -156,15 +153,7 @@ test('save cache fails if a signedUploadURL was not passed', async () => {
     )
 
   const createTarMock = jest.spyOn(tar, 'createTar')
-
-  const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheFile')
-  uploadCacheMock.mockReturnValueOnce(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobUploadCommonResponse)
-  )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
 
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
@@ -172,7 +161,66 @@ test('save cache fails if a signedUploadURL was not passed', async () => {
     .mockReturnValueOnce(Promise.resolve(compression))
   const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
+  jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValueOnce(archiveFileSize)
+
+  const cacheId = await saveCache([paths], key, options)
+  expect(cacheId).toBe(-1)
+
+  expect(createCacheEntryMock).toHaveBeenCalledWith({
+    key,
+    version: cacheVersion
+  })
+
+  const archiveFolder = '/foo/bar'
+  const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
+  expect(createTarMock).toHaveBeenCalledWith(
+    archiveFolder,
+    cachePaths,
+    compression
+  )
+
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
+  expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('finalize save cache failure', async () => {
+  const paths = 'node_modules'
+  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+  const cachePaths = [path.resolve(paths)]
+  const logWarningMock = jest.spyOn(core, 'warning')
+  const signedUploadURL = 'https://blob-storage.local?signed=true'
   const archiveFileSize = 1024
+  const options: UploadOptions = {
+    archiveSizeBytes: archiveFileSize, // These should always match
+    useAzureSdk: true,
+    uploadChunkSize: 64 * 1024 * 1024,
+    uploadConcurrency: 8
+  }
+
+  const createCacheEntryMock = jest
+    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
+    .mockReturnValue(
+      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
+    )
+
+  const createTarMock = jest.spyOn(tar, 'createTar')
+  const saveCacheMock = jest
+    .spyOn(cacheHttpClient, 'saveCache')
+    .mockResolvedValue(Promise.resolve())
+
+  const compression = CompressionMethod.Zstd
+  const getCompressionMock = jest
+    .spyOn(cacheUtils, 'getCompressionMethod')
+    .mockReturnValueOnce(Promise.resolve(compression))
+  const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
   jest
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
@@ -181,7 +229,7 @@ test('finalize save cache failure', async () => {
     .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
     .mockReturnValue(Promise.resolve({ok: false, entryId: ''}))
 
-  const cacheId = await saveCache([paths], key)
+  const cacheId = await saveCache([paths], key, options)
 
   expect(createCacheEntryMock).toHaveBeenCalledWith({
     key,
@@ -196,7 +244,12 @@ test('finalize save cache failure', async () => {
     compression
   )
 
-  expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
 
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
   expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
@@ -211,66 +264,20 @@ test('finalize save cache failure', async () => {
   )
 })
 
-test('save with uploadCache Server error will fail', async () => {
-  const paths = 'node_modules'
-  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
-    .mockReturnValue(
-      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
-    )
-
-  const archiveFileSize = 1024
-  jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValueOnce(archiveFileSize)
-
-  jest
-    .spyOn(uploadCacheModule, 'uploadCacheFile')
-    .mockRejectedValueOnce(new InvalidResponseError('boom'))
-
-  const cacheId = await saveCache([paths], key)
-
-  expect(cacheId).toBe(-1)
-})
-
-test('uploadFile returns 500', async () => {
-  const paths = 'node_modules'
-  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
-  const logWarningMock = jest.spyOn(core, 'warning')
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
-    .mockReturnValue(
-      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
-    )
-
-  const archiveFileSize = 1024
-  jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValueOnce(archiveFileSize)
-
-  jest.spyOn(uploadCacheModule, 'uploadCacheFile').mockRestore()
-
-  uploadFileMock = jest.fn().mockResolvedValueOnce({
-    _response: {
-      status: 500
-    }
-  })
-
-  const cacheId = await saveCache([paths], key)
-
-  expect(logWarningMock).toHaveBeenCalledWith(
-    'Failed to save: Upload failed with status code 500'
-  )
-  expect(cacheId).toBe(-1)
-})
-
 test('save with valid inputs uploads a cache', async () => {
   const paths = 'node_modules'
   const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(paths)]
   const signedUploadURL = 'https://blob-storage.local?signed=true'
   const createTarMock = jest.spyOn(tar, 'createTar')
 
   const archiveFileSize = 1024
+  const options: UploadOptions = {
+    archiveSizeBytes: archiveFileSize, // These should always match
+    useAzureSdk: true,
+    uploadChunkSize: 64 * 1024 * 1024,
+    uploadConcurrency: 8
+  }
   jest
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
@@ -282,15 +289,7 @@ test('save with valid inputs uploads a cache', async () => {
     Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
   )
 
-  const uploadCacheMock = jest
-    .spyOn(uploadCacheModule, 'uploadCacheFile')
-    .mockReturnValueOnce(
-      Promise.resolve({
-        _response: {
-          status: 200
-        }
-      } as BlobUploadCommonResponse)
-    )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
 
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
@@ -306,7 +305,12 @@ test('save with valid inputs uploads a cache', async () => {
   const archiveFolder = '/foo/bar'
   const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
 
-  expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
   expect(createTarMock).toHaveBeenCalledWith(
     archiveFolder,
     cachePaths,
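
Note: the recurring archiveSizeBytes: archiveFileSize fixture (with its "These should always match" comment) mirrors what saveCacheV2 does in src/cache.ts below: the measured archive size is copied into the options, and UploadProgress in uploadUtils.ts uses that value (options?.archiveSizeBytes ?? 0) as the denominator when reporting percent complete.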

packages/cache/__tests__/uploadUtils.test.ts (new file)

@@ -0,0 +1,58 @@
+import * as uploadUtils from '../src/internal/uploadUtils'
+import {TransferProgressEvent} from '@azure/ms-rest-js'
+
+test('upload progress tracked correctly', () => {
+  const progress = new uploadUtils.UploadProgress(1000)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(0)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(0)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 0} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(0)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(0)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 250} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(250)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(250)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 500} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(500)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(500)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 750} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(750)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(750)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(1000)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(1000)
+  expect(progress.isDone()).toBe(true)
+})

packages/cache/src/cache.ts

@@ -13,8 +13,6 @@ import {
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
 import {CacheFileSizeLimit} from './internal/constants'
-import {uploadCacheFile} from './internal/blob/upload-cache'
-import {downloadCacheFile} from './internal/blob/download-cache'
 
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -66,8 +64,8 @@ export function isFeatureAvailable(): boolean {
 * Restores cache from keys
 *
 * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
- * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
 * @param downloadOptions cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
@@ -108,12 +106,12 @@ export async function restoreCache(
 /**
  * Restores cache using the legacy Cache Service
  *
- * @param paths
- * @param primaryKey
- * @param restoreKeys
- * @param options
- * @param enableCrossOsArchive
- * @returns
+ * @param paths a list of file paths to restore from the cache
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
+ * @param options cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
+ * @returns string returns the key for the cache hit, otherwise returns undefined
  */
 async function restoreCacheV1(
   paths: string[],
@@ -204,11 +202,11 @@ async function restoreCacheV1(
 }
 
 /**
- * Restores cache using the new Cache Service
+ * Restores cache using Cache Service v2
  *
  * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
- * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
  * @param downloadOptions cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined
@@ -220,6 +218,11 @@ async function restoreCacheV2(
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ): Promise<string | undefined> {
+  // Override UploadOptions to force the use of Azure
+  options = {
+    ...options,
+    useAzureSdk: true
+  }
   restoreKeys = restoreKeys || []
   const keys = [primaryKey, ...restoreKeys]
@@ -271,11 +274,11 @@ async function restoreCacheV2(
     core.debug(`Archive path: ${archivePath}`)
     core.debug(`Starting download of archive to: ${archivePath}`)
 
-    const downloadResponse = await downloadCacheFile(
+    await cacheHttpClient.downloadCache(
       response.signedDownloadUrl,
-      archivePath
+      archivePath,
+      options
     )
-    core.debug(`Download response status: ${downloadResponse._response.status}`)
 
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -422,7 +425,7 @@ async function saveCacheV1(
     }
 
     core.debug(`Saving Cache (ID: ${cacheId})`)
-    await cacheHttpClient.saveCache(cacheId, archivePath, options)
+    await cacheHttpClient.saveCache(cacheId, archivePath, '', options)
   } catch (error) {
     const typedError = error as Error
     if (typedError.name === ValidationError.name) {
@@ -445,12 +448,12 @@ async function saveCacheV1(
 }
 
 /**
- * Save cache using the new Cache Service
+ * Save cache using Cache Service v2
  *
- * @param paths
- * @param key
- * @param options
- * @param enableCrossOsArchive
+ * @param paths a list of file paths to restore from the cache
+ * @param key an explicit key for restoring the cache
+ * @param options cache upload options
+ * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
  * @returns
  */
 async function saveCacheV2(
@@ -459,6 +462,15 @@ async function saveCacheV2(
   options?: UploadOptions,
   enableCrossOsArchive = false
 ): Promise<number> {
+  // Override UploadOptions to force the use of Azure
+  // ...options goes first because we want to override the default values
+  // set in UploadOptions with these specific figures
+  options = {
+    ...options,
+    uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
+    uploadConcurrency: 8, // 8 workers for parallel upload
+    useAzureSdk: true
+  }
   const compressionMethod = await utils.getCompressionMethod()
   const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
   let cacheId = -1
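
Note on the spread ordering above: with object spread, later properties win, so placing ...options first means any caller-supplied uploadChunkSize, uploadConcurrency, or useAzureSdk is deliberately discarded in favor of the fixed values. A standalone TypeScript illustration (not part of the commit):

    const caller = {uploadChunkSize: 1024, useAzureSdk: false, archiveSizeBytes: 0}
    const effective = {
      ...caller, // caller values are spread first...
      uploadChunkSize: 64 * 1024 * 1024, // ...so these literal overrides win
      uploadConcurrency: 8,
      useAzureSdk: true
    }
    // effective.archiveSizeBytes survives; the other three fields are forced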
@@ -499,6 +511,9 @@ async function saveCacheV2(
       )
     }
 
+    // Set the archive size in the options, will be used to display the upload progress
+    options.archiveSizeBytes = archiveFileSize
+
     core.debug('Reserving Cache')
     const version = utils.getCacheVersion(
       paths,
@@ -518,11 +533,12 @@ async function saveCacheV2(
     }
 
     core.debug(`Attempting to upload cache located at: ${archivePath}`)
-    const uploadResponse = await uploadCacheFile(
+    await cacheHttpClient.saveCache(
+      cacheId,
+      archivePath,
       response.signedUploadUrl,
-      archivePath
+      options
     )
-    core.debug(`Download response status: ${uploadResponse._response.status}`)
 
     const finalizeRequest: FinalizeCacheEntryUploadRequest = {
       key,

packages/cache/src/internal/blob/download-cache.ts (deleted)

@@ -1,31 +0,0 @@
-import * as core from '@actions/core'
-import {
-  BlobClient,
-  BlockBlobClient,
-  BlobDownloadOptions,
-  BlobDownloadResponseParsed
-} from '@azure/storage-blob'
-
-export async function downloadCacheFile(
-  signedUploadURL: string,
-  archivePath: string
-): Promise<BlobDownloadResponseParsed> {
-  const downloadOptions: BlobDownloadOptions = {
-    maxRetryRequests: 5
-  }
-
-  const blobClient: BlobClient = new BlobClient(signedUploadURL)
-  const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
-
-  core.debug(
-    `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
-  )
-
-  return blockBlobClient.downloadToFile(
-    archivePath,
-    0,
-    undefined,
-    downloadOptions
-  )
-}

packages/cache/src/internal/blob/upload-cache.ts (deleted)

@@ -1,37 +0,0 @@
-import * as core from '@actions/core'
-import {
-  BlobClient,
-  BlobUploadCommonResponse,
-  BlockBlobClient,
-  BlockBlobParallelUploadOptions
-} from '@azure/storage-blob'
-import {InvalidResponseError} from '../shared/errors'
-
-export async function uploadCacheFile(
-  signedUploadURL: string,
-  archivePath: string
-): Promise<BlobUploadCommonResponse> {
-  // Specify data transfer options
-  const uploadOptions: BlockBlobParallelUploadOptions = {
-    blockSize: 4 * 1024 * 1024, // 4 MiB max block size
-    concurrency: 4, // maximum number of parallel transfer workers
-    maxSingleShotSize: 8 * 1024 * 1024 // 8 MiB initial transfer size
-  }
-
-  const blobClient: BlobClient = new BlobClient(signedUploadURL)
-  const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
-
-  core.debug(
-    `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
-  )
-
-  const resp = await blockBlobClient.uploadFile(archivePath, uploadOptions)
-
-  if (resp._response.status >= 400) {
-    throw new InvalidResponseError(
-      `Upload failed with status code ${resp._response.status}`
-    )
-  }
-
-  return resp
-}

packages/cache/src/internal/cacheHttpClient.ts

@@ -8,6 +8,7 @@ import {
 import * as fs from 'fs'
 import {URL} from 'url'
 import * as utils from './cacheUtils'
+import {uploadCacheArchiveSDK} from './uploadUtils'
 import {
   ArtifactCacheEntry,
   InternalCacheOptions,
@@ -34,6 +35,7 @@ import {
   retryTypedResponse
 } from './requestUtils'
 import {getCacheServiceURL} from './config'
+import {getUserAgentString} from './shared/user-agent'
 
 function getCacheApiUrl(resource: string): string {
   const baseUrl: string = getCacheServiceURL()
@@ -65,7 +67,7 @@ function createHttpClient(): HttpClient {
   const bearerCredentialHandler = new BearerCredentialHandler(token)
 
   return new HttpClient(
-    'actions/cache',
+    getUserAgentString(),
     [bearerCredentialHandler],
     getRequestOptions()
   )
@@ -325,8 +327,20 @@ async function commitCache(
 export async function saveCache(
   cacheId: number,
   archivePath: string,
+  signedUploadURL?: string,
   options?: UploadOptions
 ): Promise<void> {
+  const uploadOptions = getUploadOptions(options)
+
+  if (uploadOptions.useAzureSdk) {
+    // Use Azure storage SDK to upload caches directly to Azure
+    if (!signedUploadURL) {
+      throw new Error(
+        'Azure Storage SDK can only be used when a signed URL is provided.'
+      )
+    }
+    await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
+  } else {
     const httpClient = createHttpClient()
     core.debug('Upload cache')
@@ -336,10 +350,16 @@ export async function saveCache(
     core.debug('Commiting cache')
     const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
-      `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
+      `Cache Size: ~${Math.round(
+        cacheSize / (1024 * 1024)
+      )} MB (${cacheSize} B)`
     )
 
-    const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
+    const commitCacheResponse = await commitCache(
+      httpClient,
+      cacheId,
+      cacheSize
+    )
     if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
       throw new Error(
         `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
@@ -347,4 +367,5 @@ export async function saveCache(
     }
 
     core.info('Cache saved successfully')
+  }
 }
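
Note: a hypothetical call site for the widened saveCache signature (the cache ID, archive path, and SAS URL below are illustrative):

    import * as cacheHttpClient from './internal/cacheHttpClient'

    async function upload(cacheId: number): Promise<void> {
      // Azure SDK path: a non-empty signed URL is required, otherwise saveCache throws.
      await cacheHttpClient.saveCache(
        cacheId,
        '/tmp/cache.tzst',
        'https://example.blob.core.windows.net/cache/entry?sig=abc', // hypothetical SAS URL
        {useAzureSdk: true, uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8}
      )

      // Legacy path: pass an empty signed URL (as saveCacheV1 now does) and the
      // chunked HTTP upload plus commitCache flow runs instead.
      await cacheHttpClient.saveCache(cacheId, '/tmp/cache.tzst', '', {useAzureSdk: false})
    }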

packages/cache/src/internal/uploadUtils.ts (new file)

@@ -0,0 +1,177 @@
+import * as core from '@actions/core'
+import {
+  BlobClient,
+  BlobUploadCommonResponse,
+  BlockBlobClient,
+  BlockBlobParallelUploadOptions
+} from '@azure/storage-blob'
+import {TransferProgressEvent} from '@azure/ms-rest-js'
+import {InvalidResponseError} from './shared/errors'
+import {UploadOptions} from '../options'
+
+/**
+ * Class for tracking the upload state and displaying stats.
+ */
+export class UploadProgress {
+  contentLength: number
+  sentBytes: number
+  startTime: number
+  displayedComplete: boolean
+  timeoutHandle?: ReturnType<typeof setTimeout>
+
+  constructor(contentLength: number) {
+    this.contentLength = contentLength
+    this.sentBytes = 0
+    this.displayedComplete = false
+    this.startTime = Date.now()
+  }
+
+  /**
+   * Sets the number of bytes sent
+   *
+   * @param sentBytes the number of bytes sent
+   */
+  setSentBytes(sentBytes: number): void {
+    this.sentBytes = sentBytes
+  }
+
+  /**
+   * Returns the total number of bytes transferred.
+   */
+  getTransferredBytes(): number {
+    return this.sentBytes
+  }
+
+  /**
+   * Returns true if the upload is complete.
+   */
+  isDone(): boolean {
+    return this.getTransferredBytes() === this.contentLength
+  }
+
+  /**
+   * Prints the current upload stats. Once the upload completes, this will print one
+   * last line and then stop.
+   */
+  display(): void {
+    if (this.displayedComplete) {
+      return
+    }
+
+    const transferredBytes = this.sentBytes
+    const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(
+      1
+    )
+    const elapsedTime = Date.now() - this.startTime
+    const uploadSpeed = (
+      transferredBytes /
+      (1024 * 1024) /
+      (elapsedTime / 1000)
+    ).toFixed(1)
+
+    core.info(
+      `Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`
+    )
+
+    if (this.isDone()) {
+      this.displayedComplete = true
+    }
+  }
+
+  /**
+   * Returns a function used to handle TransferProgressEvents.
+   */
+  onProgress(): (progress: TransferProgressEvent) => void {
+    return (progress: TransferProgressEvent) => {
+      this.setSentBytes(progress.loadedBytes)
+    }
+  }
+
+  /**
+   * Starts the timer that displays the stats.
+   *
+   * @param delayInMs the delay between each write
+   */
+  startDisplayTimer(delayInMs = 1000): void {
+    const displayCallback = (): void => {
+      this.display()
+
+      if (!this.isDone()) {
+        this.timeoutHandle = setTimeout(displayCallback, delayInMs)
+      }
+    }
+
+    this.timeoutHandle = setTimeout(displayCallback, delayInMs)
+  }
+
+  /**
+   * Stops the timer that displays the stats. As this typically indicates the upload
+   * is complete, this will display one last line, unless the last line has already
+   * been written.
+   */
+  stopDisplayTimer(): void {
+    if (this.timeoutHandle) {
+      clearTimeout(this.timeoutHandle)
+      this.timeoutHandle = undefined
+    }
+
+    this.display()
+  }
+}
+
+/**
+ * Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
+ * This function will display progress information to the console. Concurrency of the
+ * upload is determined by the calling functions.
+ *
+ * @param signedUploadURL
+ * @param archivePath
+ * @param options
+ * @returns
+ */
+export async function uploadCacheArchiveSDK(
+  signedUploadURL: string,
+  archivePath: string,
+  options?: UploadOptions
+): Promise<BlobUploadCommonResponse> {
+  const blobClient: BlobClient = new BlobClient(signedUploadURL)
+  const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
+  const uploadProgress = new UploadProgress(options?.archiveSizeBytes ?? 0)
+
+  // Specify data transfer options
+  const uploadOptions: BlockBlobParallelUploadOptions = {
+    blockSize: options?.uploadChunkSize,
+    concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers
+    maxSingleShotSize: 128 * 1024 * 1024, // 128 MiB initial transfer size
+    onProgress: uploadProgress.onProgress()
+  }
+
+  try {
+    uploadProgress.startDisplayTimer()
+
+    core.debug(
+      `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
+    )
+
+    const response = await blockBlobClient.uploadFile(
+      archivePath,
+      uploadOptions
+    )
+
+    // TODO: better management of non-retryable errors
+    if (response._response.status >= 400) {
+      throw new InvalidResponseError(
+        `uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`
+      )
+    }
+
+    return response
+  } catch (error) {
+    core.warning(
+      `uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`
+    )
+    throw error
+  } finally {
+    uploadProgress.stopDisplayTimer()
+  }
+}
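
Note: a minimal sketch of driving the new helper directly; inside the library it is reached through cacheHttpClient.saveCache above, and the URL and sizes here are illustrative:

    import {uploadCacheArchiveSDK} from './internal/uploadUtils'

    async function uploadArchive(signedUploadURL: string): Promise<void> {
      const response = await uploadCacheArchiveSDK(signedUploadURL, '/tmp/cache.tzst', {
        archiveSizeBytes: 62915000, // drives the progress percentage
        uploadChunkSize: 64 * 1024 * 1024, // forwarded as the SDK blockSize
        uploadConcurrency: 8 // parallel transfer workers
      })
      console.log(`Upload status: ${response._response.status}`)
    }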

packages/cache/src/options.ts

@@ -4,6 +4,14 @@ import * as core from '@actions/core'
  * Options to control cache upload
  */
 export interface UploadOptions {
+  /**
+   * Indicates whether to use the Azure Blob SDK to download caches
+   * that are stored on Azure Blob Storage to improve reliability and
+   * performance
+   *
+   * @default false
+   */
+  useAzureSdk?: boolean
   /**
    * Number of parallel cache upload
    *
@@ -16,6 +24,10 @@ export interface UploadOptions {
    * @default 32MB
    */
   uploadChunkSize?: number
+  /**
+   * Archive size in bytes
+   */
+  archiveSizeBytes?: number
 }
 
 /**
@@ -76,12 +88,18 @@ export interface DownloadOptions {
  * @param copy the original upload options
  */
 export function getUploadOptions(copy?: UploadOptions): UploadOptions {
+  // Defaults if not overriden
   const result: UploadOptions = {
+    useAzureSdk: false,
     uploadConcurrency: 4,
     uploadChunkSize: 32 * 1024 * 1024
   }
 
   if (copy) {
+    if (typeof copy.useAzureSdk === 'boolean') {
+      result.useAzureSdk = copy.useAzureSdk
+    }
+
     if (typeof copy.uploadConcurrency === 'number') {
       result.uploadConcurrency = copy.uploadConcurrency
     }
@@ -91,6 +109,26 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
     }
   }
 
+  /**
+   * Add env var overrides
+   */
+  // Cap the uploadConcurrency at 32
+  result.uploadConcurrency = !isNaN(
+    Number(process.env['CACHE_UPLOAD_CONCURRENCY'])
+  )
+    ? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
+    : result.uploadConcurrency
+  // Cap the uploadChunkSize at 128MiB
+  result.uploadChunkSize = !isNaN(
+    Number(process.env['CACHE_UPLOAD_CHUNK_SIZE'])
+  )
+    ? Math.min(
+        128 * 1024 * 1024,
+        Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024
+      )
+    : result.uploadChunkSize
+
+  core.debug(`Use Azure SDK: ${result.useAzureSdk}`)
   core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
   core.debug(`Upload chunk size: ${result.uploadChunkSize}`)
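
Note: combined with the capping tests at the top of this diff, the env-var behavior of getUploadOptions can be summarized in a short sketch (values mirror the "do not exceed caps" test; assumes getUploadOptions from src/options.ts is in scope):

    process.env.CACHE_UPLOAD_CONCURRENCY = '64' // parsed as a worker count, capped at 32
    process.env.CACHE_UPLOAD_CHUNK_SIZE = '256' // parsed as MiB, capped at 128 MiB

    const opts = getUploadOptions({useAzureSdk: true})
    // opts.uploadConcurrency === 32
    // opts.uploadChunkSize === 128 * 1024 * 1024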