diff --git a/packages/cache/__tests__/options.test.ts b/packages/cache/__tests__/options.test.ts
index 7585b60f..b4c5a1f1 100644
--- a/packages/cache/__tests__/options.test.ts
+++ b/packages/cache/__tests__/options.test.ts
@@ -11,8 +11,6 @@ const downloadConcurrency = 8
 const timeoutInMs = 30000
 const segmentTimeoutInMs = 600000
 const lookupOnly = false
-const uploadConcurrency = 4
-const uploadChunkSize = 32 * 1024 * 1024
 
 test('getDownloadOptions sets defaults', async () => {
   const actualOptions = getDownloadOptions()
@@ -43,18 +41,21 @@ test('getDownloadOptions overrides all settings', async () => {
 })
 
 test('getUploadOptions sets defaults', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 4,
+    uploadChunkSize: 32 * 1024 * 1024,
+    useAzureSdk: false
+  }
   const actualOptions = getUploadOptions()
-  expect(actualOptions).toEqual({
-    uploadConcurrency,
-    uploadChunkSize
-  })
+  expect(actualOptions).toEqual(expectedOptions)
 })
 
 test('getUploadOptions overrides all settings', async () => {
   const expectedOptions: UploadOptions = {
     uploadConcurrency: 2,
-    uploadChunkSize: 16 * 1024 * 1024
+    uploadChunkSize: 16 * 1024 * 1024,
+    useAzureSdk: true
   }
   const actualOptions = getUploadOptions(expectedOptions)
 
@@ -62,6 +63,34 @@ test('getUploadOptions overrides all settings', async () => {
   expect(actualOptions).toEqual(expectedOptions)
 })
 
+test('env variables override all getUploadOptions settings', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 16,
+    uploadChunkSize: 64 * 1024 * 1024,
+    useAzureSdk: true
+  }
+
+  process.env.CACHE_UPLOAD_CONCURRENCY = '16'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
+
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
+test('env variables override all getUploadOptions settings but do not exceed caps', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 32,
+    uploadChunkSize: 128 * 1024 * 1024,
+    useAzureSdk: true
+  }
+
+  process.env.CACHE_UPLOAD_CONCURRENCY = '64'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
+
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
 test('getDownloadOptions overrides download timeout minutes', async () => {
   const expectedOptions: DownloadOptions = {
     useAzureSdk: false,
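The two new tests above exercise the env-var override and capping behavior added to `getUploadOptions` in `packages/cache/src/options.ts` (bottom of this diff). A minimal sketch of the observable behavior, assuming the built package exposes the options module at `@actions/cache/lib/options`:

```typescript
// Sketch (not part of the patch): the CACHE_UPLOAD_* env vars win over
// caller-supplied values, but are clamped to the caps set in options.ts
// (32 workers, 128 MiB chunks). Values here are illustrative.
import {getUploadOptions} from '@actions/cache/lib/options'

process.env.CACHE_UPLOAD_CONCURRENCY = '64' // requests 64 workers
process.env.CACHE_UPLOAD_CHUNK_SIZE = '256' // requests 256 MiB chunks

const opts = getUploadOptions({
  uploadConcurrency: 2,
  uploadChunkSize: 16 * 1024 * 1024
})

console.log(opts.uploadConcurrency) // 32 (capped)
console.log(opts.uploadChunkSize === 128 * 1024 * 1024) // true (capped)
```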
diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
index cc4f9e3c..edcb16d7 100644
--- a/packages/cache/__tests__/restoreCacheV2.test.ts
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -3,11 +3,11 @@ import * as path from 'path'
 import * as tar from '../src/internal/tar'
 import * as config from '../src/internal/config'
 import * as cacheUtils from '../src/internal/cacheUtils'
-import * as downloadCacheModule from '../src/internal/blob/download-cache'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
 import {restoreCache} from '../src/cache'
 import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import {BlobDownloadResponseParsed} from '@azure/storage-blob'
+import {DownloadOptions} from '../src/options'
 
 jest.mock('../src/internal/cacheHttpClient')
 jest.mock('../src/internal/cacheUtils')
@@ -142,6 +142,7 @@ test('restore with gzip compressed cache found', async () => {
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+  const options = {useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -169,17 +170,7 @@ test('restore with gzip compressed cache found', async () => {
   })
 
   const archivePath = path.join(tempPath, CacheFilename.Gzip)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobDownloadResponseParsed)
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const fileSize = 142
   const getArchiveFileSizeInBytesMock = jest
@@ -189,7 +180,7 @@ test('restore with gzip compressed cache found', async () => {
   const extractTarMock = jest.spyOn(tar, 'extractTar')
   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key)
+  const cacheKey = await restoreCache(paths, key, [], options)
 
   expect(cacheKey).toBe(key)
   expect(getCacheVersionMock).toHaveBeenCalledWith(
@@ -203,9 +194,10 @@ test('restore with gzip compressed cache found', async () => {
     version: cacheVersion
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+  expect(downloadCacheMock).toHaveBeenCalledWith(
     signedDownloadUrl,
-    archivePath
+    archivePath,
+    options
   )
   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
@@ -226,6 +218,7 @@ test('restore with zstd compressed cache found', async () => {
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
+  const options = {useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -253,17 +246,7 @@ test('restore with zstd compressed cache found', async () => {
   })
 
   const archivePath = path.join(tempPath, CacheFilename.Zstd)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobDownloadResponseParsed)
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const fileSize = 62915000
   const getArchiveFileSizeInBytesMock = jest
@@ -273,7 +256,7 @@ test('restore with zstd compressed cache found', async () => {
   const extractTarMock = jest.spyOn(tar, 'extractTar')
   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key)
+  const cacheKey = await restoreCache(paths, key, [], options)
 
   expect(cacheKey).toBe(key)
   expect(getCacheVersionMock).toHaveBeenCalledWith(
@@ -287,9 +270,10 @@ test('restore with zstd compressed cache found', async () => {
     version: cacheVersion
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+  expect(downloadCacheMock).toHaveBeenCalledWith(
     signedDownloadUrl,
-    archivePath
+    archivePath,
+    options
   )
   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
@@ -311,6 +295,7 @@ test('restore with cache found for restore key', async () => {
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
+  const options = {useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -338,17 +323,7 @@ test('restore with cache found for restore key', async () => {
   })
 
   const archivePath = path.join(tempPath, CacheFilename.Gzip)
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
-  downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobDownloadResponseParsed)
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const fileSize = 142
   const getArchiveFileSizeInBytesMock = jest
@@ -358,7 +333,7 @@ test('restore with cache found for restore key', async () => {
   const extractTarMock = jest.spyOn(tar, 'extractTar')
   const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
 
-  const cacheKey = await restoreCache(paths, key, restoreKeys)
+  const cacheKey = await restoreCache(paths, key, restoreKeys, options)
 
   expect(cacheKey).toBe(restoreKeys[0])
   expect(getCacheVersionMock).toHaveBeenCalledWith(
@@ -372,9 +347,10 @@ test('restore with cache found for restore key', async () => {
     version: cacheVersion
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+  expect(downloadCacheMock).toHaveBeenCalledWith(
    signedDownloadUrl,
-    archivePath
+    archivePath,
+    options
   )
   expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
   expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
@@ -388,14 +364,14 @@ test('restore with cache found for restore key', async () => {
   expect(compressionMethodMock).toHaveBeenCalledTimes(1)
 })
 
-test('restore with dry run', async () => {
+test('restore with lookup only enabled', async () => {
   const paths = ['node_modules']
   const key = 'node-test'
-  const options = {lookupOnly: true}
   const compressionMethod = CompressionMethod.Gzip
   const signedDownloadUrl = 'https://blob-storage.local?signed=true'
   const cacheVersion =
     'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+  const options = {lookupOnly: true, useAzureSdk: true} as DownloadOptions
 
   const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
   getCacheVersionMock.mockReturnValue(cacheVersion)
@@ -416,10 +392,7 @@ test('restore with dry run', async () => {
   )
 
   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-  const downloadCacheFileMock = jest.spyOn(
-    downloadCacheModule,
-    'downloadCacheFile'
-  )
+  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
 
   const cacheKey = await restoreCache(paths, key, undefined, options)
 
@@ -438,5 +411,5 @@ test('restore with dry run', async () => {
 
   // creating a tempDir and downloading the cache are skipped
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
-  expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
+  expect(downloadCacheMock).toHaveBeenCalledTimes(0)
 })
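Each restore test above now passes an explicit `DownloadOptions` value and asserts it reaches `cacheHttpClient.downloadCache` unchanged. A hedged sketch of the caller-facing flow (paths and keys are illustrative; the forced `useAzureSdk: true` happens inside `restoreCacheV2`, shown later in this diff):

```typescript
// Sketch (not part of the patch): caller-supplied DownloadOptions flow into
// the v2 restore path, which force-enables the Azure SDK before downloading.
import {restoreCache} from '@actions/cache'

async function example(): Promise<void> {
  // lookupOnly: false means the archive is actually downloaded and extracted.
  const hitKey = await restoreCache(['node_modules'], 'node-test', [], {
    lookupOnly: false
  })
  // Internally (see src/cache.ts below): options = {...options, useAzureSdk: true}
  console.log(hitKey ?? 'cache miss')
}
```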
diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts
index 81049e0a..e5ed695b 100644
--- a/packages/cache/__tests__/saveCache.test.ts
+++ b/packages/cache/__tests__/saveCache.test.ts
@@ -270,7 +270,12 @@ test('save with server error should fail', async () => {
     compression
   )
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    cacheId,
+    archiveFile,
+    '',
+    undefined
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
@@ -315,7 +320,12 @@ test('save with valid inputs uploads a cache', async () => {
     compression
   )
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    cacheId,
+    archiveFile,
+    '',
+    undefined
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
diff --git a/packages/cache/__tests__/saveCacheV2.test.ts b/packages/cache/__tests__/saveCacheV2.test.ts
index 67c7f1de..6744425d 100644
--- a/packages/cache/__tests__/saveCacheV2.test.ts
+++ b/packages/cache/__tests__/saveCacheV2.test.ts
@@ -6,15 +6,14 @@ import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import * as config from '../src/internal/config'
 import * as tar from '../src/internal/tar'
 import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import * as uploadCacheModule from '../src/internal/blob/upload-cache'
-import {BlobUploadCommonResponse} from '@azure/storage-blob'
-import {InvalidResponseError} from '../src/internal/shared/errors'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
+import {UploadOptions} from '../src/options'
 
 let logDebugMock: jest.SpyInstance
 
 jest.mock('../src/internal/tar')
 
-let uploadFileMock = jest.fn()
+const uploadFileMock = jest.fn()
 const blockBlobClientMock = jest.fn().mockImplementation(() => ({
   uploadFile: uploadFileMock
 }))
@@ -116,15 +115,7 @@ test('create cache entry failure', async () => {
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
   const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
-  const uploadCacheFileMock = jest
-    .spyOn(uploadCacheModule, 'uploadCacheFile')
-    .mockReturnValueOnce(
-      Promise.resolve({
-        _response: {
-          status: 200
-        }
-      } as BlobUploadCommonResponse)
-    )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
 
   const cacheId = await saveCache(paths, key)
   expect(cacheId).toBe(-1)
@@ -139,15 +130,21 @@ test('create cache entry failure', async () => {
   expect(createTarMock).toHaveBeenCalledTimes(1)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
   expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0)
-  expect(uploadCacheFileMock).toHaveBeenCalledTimes(0)
+  expect(saveCacheMock).toHaveBeenCalledTimes(0)
 })
 
-test('finalize save cache failure', async () => {
+test('save cache fails if a signedUploadURL was not passed', async () => {
   const paths = 'node_modules'
   const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(paths)]
-  const logWarningMock = jest.spyOn(core, 'warning')
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
+  const signedUploadURL = ''
+  const archiveFileSize = 1024
+  const options: UploadOptions = {
+    archiveSizeBytes: archiveFileSize, // These should always match
+    useAzureSdk: true,
+    uploadChunkSize: 64 * 1024 * 1024,
+    uploadConcurrency: 8
+  }
 
   const createCacheEntryMock = jest
     .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
@@ -156,15 +153,7 @@ test('finalize save cache failure', async () => {
     )
 
   const createTarMock = jest.spyOn(tar, 'createTar')
-
-  const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheFile')
-  uploadCacheMock.mockReturnValueOnce(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobUploadCommonResponse)
-  )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
 
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
@@ -172,7 +161,66 @@ test('finalize save cache failure', async () => {
     .mockReturnValueOnce(Promise.resolve(compression))
 
   const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
+  jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValueOnce(archiveFileSize)
+
+  const cacheId = await saveCache([paths], key, options)
+
+  expect(cacheId).toBe(-1)
+  expect(createCacheEntryMock).toHaveBeenCalledWith({
+    key,
+    version: cacheVersion
+  })
+
+  const archiveFolder = '/foo/bar'
+  const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
+  expect(createTarMock).toHaveBeenCalledWith(
+    archiveFolder,
+    cachePaths,
+    compression
+  )
+
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
+  expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('finalize save cache failure', async () => {
+  const paths = 'node_modules'
+  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+  const cachePaths = [path.resolve(paths)]
+  const logWarningMock = jest.spyOn(core, 'warning')
+  const signedUploadURL = 'https://blob-storage.local?signed=true'
   const archiveFileSize = 1024
+  const options: UploadOptions = {
+    archiveSizeBytes: archiveFileSize, // These should always match
+    useAzureSdk: true,
+    uploadChunkSize: 64 * 1024 * 1024,
+    uploadConcurrency: 8
+  }
+
+  const createCacheEntryMock = jest
+    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
+    .mockReturnValue(
+      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
+    )
+
+  const createTarMock = jest.spyOn(tar, 'createTar')
+  const saveCacheMock = jest
+    .spyOn(cacheHttpClient, 'saveCache')
+    .mockResolvedValue(Promise.resolve())
+
+  const compression = CompressionMethod.Zstd
+  const getCompressionMock = jest
+    .spyOn(cacheUtils, 'getCompressionMethod')
+    .mockReturnValueOnce(Promise.resolve(compression))
+
+  const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
   jest
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
@@ -181,7 +229,7 @@ test('finalize save cache failure', async () => {
     .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
     .mockReturnValue(Promise.resolve({ok: false, entryId: ''}))
 
-  const cacheId = await saveCache([paths], key)
+  const cacheId = await saveCache([paths], key, options)
 
   expect(createCacheEntryMock).toHaveBeenCalledWith({
     key,
@@ -196,7 +244,12 @@ test('finalize save cache failure', async () => {
     compression
   )
 
-  expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 
   expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
@@ -211,66 +264,20 @@ test('finalize save cache failure', async () => {
   )
 })
 
-test('save with uploadCache Server error will fail', async () => {
-  const paths = 'node_modules'
-  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
-    .mockReturnValue(
-      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
-    )
-
-  const archiveFileSize = 1024
-  jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValueOnce(archiveFileSize)
-  jest
-    .spyOn(uploadCacheModule, 'uploadCacheFile')
-    .mockRejectedValueOnce(new InvalidResponseError('boom'))
-
-  const cacheId = await saveCache([paths], key)
-  expect(cacheId).toBe(-1)
-})
-
-test('uploadFile returns 500', async () => {
-  const paths = 'node_modules'
-  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
-  const logWarningMock = jest.spyOn(core, 'warning')
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
-    .mockReturnValue(
-      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
-    )
-
-  const archiveFileSize = 1024
-  jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValueOnce(archiveFileSize)
-  jest.spyOn(uploadCacheModule, 'uploadCacheFile').mockRestore()
-
-  uploadFileMock = jest.fn().mockResolvedValueOnce({
-    _response: {
-      status: 500
-    }
-  })
-  const cacheId = await saveCache([paths], key)
-
-  expect(logWarningMock).toHaveBeenCalledWith(
-    'Failed to save: Upload failed with status code 500'
-  )
-  expect(cacheId).toBe(-1)
-})
-
 test('save with valid inputs uploads a cache', async () => {
   const paths = 'node_modules'
   const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(paths)]
   const signedUploadURL = 'https://blob-storage.local?signed=true'
   const createTarMock = jest.spyOn(tar, 'createTar')
-
   const archiveFileSize = 1024
+  const options: UploadOptions = {
+    archiveSizeBytes: archiveFileSize, // These should always match
+    useAzureSdk: true,
+    uploadChunkSize: 64 * 1024 * 1024,
+    uploadConcurrency: 8
+  }
+
   jest
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
@@ -282,15 +289,7 @@ test('save with valid inputs uploads a cache', async () => {
       Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
     )
 
-  const uploadCacheMock = jest
-    .spyOn(uploadCacheModule, 'uploadCacheFile')
-    .mockReturnValueOnce(
-      Promise.resolve({
-        _response: {
-          status: 200
-        }
-      } as BlobUploadCommonResponse)
-    )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
 
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
@@ -306,7 +305,12 @@ test('save with valid inputs uploads a cache', async () => {
   const archiveFolder = '/foo/bar'
   const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
 
-  expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
   expect(createTarMock).toHaveBeenCalledWith(
     archiveFolder,
     cachePaths,
diff --git a/packages/cache/__tests__/uploadUtils.test.ts b/packages/cache/__tests__/uploadUtils.test.ts
new file mode 100644
index 00000000..2f0b8b55
--- /dev/null
+++ b/packages/cache/__tests__/uploadUtils.test.ts
@@ -0,0 +1,58 @@
+import * as uploadUtils from '../src/internal/uploadUtils'
+import {TransferProgressEvent} from '@azure/ms-rest-js'
+
+test('upload progress tracked correctly', () => {
+  const progress = new uploadUtils.UploadProgress(1000)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(0)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(0)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 0} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(0)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(0)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 250} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(250)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(250)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 500} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(500)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(500)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 750} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(750)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(750)
+  expect(progress.isDone()).toBe(false)
+
+  progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent)
+
+  expect(progress.contentLength).toBe(1000)
+  expect(progress.sentBytes).toBe(1000)
+  expect(progress.displayedComplete).toBe(false)
+  expect(progress.timeoutHandle).toBeUndefined()
+  expect(progress.getTransferredBytes()).toBe(1000)
+  expect(progress.isDone()).toBe(true)
+})
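The new test file above drives `UploadProgress` purely through its `onProgress()` callback. A sketch of how the class is wired up during a real upload (the Azure SDK invokes the callback with `loadedBytes`, and the display timer prints periodic stats; the import path assumes the built package layout):

```typescript
// Sketch (not part of the patch): driving UploadProgress the way
// uploadCacheArchiveSDK does, but without a real Azure upload.
import {UploadProgress} from '@actions/cache/lib/internal/uploadUtils'

const progress = new UploadProgress(4 * 1024 * 1024) // expect 4 MiB total
progress.startDisplayTimer(1000) // print "Sent x of y (z%)" once per second

const onProgress = progress.onProgress()
onProgress({loadedBytes: 2 * 1024 * 1024}) // SDK callback: half the bytes sent
onProgress({loadedBytes: 4 * 1024 * 1024}) // all bytes sent; isDone() is now true

progress.stopDisplayTimer() // clears the timer and prints the final line
```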
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 0a73059a..9b02489f 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -13,8 +13,6 @@ import {
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
 import {CacheFileSizeLimit} from './internal/constants'
-import {uploadCacheFile} from './internal/blob/upload-cache'
-import {downloadCacheFile} from './internal/blob/download-cache'
 
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -66,8 +64,8 @@ export function isFeatureAvailable(): boolean {
  * Restores cache from keys
  *
  * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
- * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
  * @param downloadOptions cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined
@@ -108,12 +106,12 @@ export async function restoreCache(
 /**
  * Restores cache using the legacy Cache Service
  *
- * @param paths
- * @param primaryKey
- * @param restoreKeys
- * @param options
- * @param enableCrossOsArchive
- * @returns
+ * @param paths a list of file paths to restore from the cache
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
+ * @param options cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
+ * @returns string returns the key for the cache hit, otherwise returns undefined
  */
 async function restoreCacheV1(
   paths: string[],
@@ -204,11 +202,11 @@ async function restoreCacheV1(
 }
 
 /**
- * Restores cache using the new Cache Service
+ * Restores cache using Cache Service v2
  *
  * @param paths a list of file paths to restore from the cache
- * @param primaryKey an explicit key for restoring the cache
- * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
  * @param downloadOptions cache download options
  * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
  * @returns string returns the key for the cache hit, otherwise returns undefined
@@ -220,6 +218,11 @@ async function restoreCacheV2(
   paths: string[],
   primaryKey: string,
   restoreKeys?: string[],
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ): Promise<string | undefined> {
+  // Override DownloadOptions to force the use of Azure
+  options = {
+    ...options,
+    useAzureSdk: true
+  }
   restoreKeys = restoreKeys || []
   const keys = [primaryKey, ...restoreKeys]
@@ -271,11 +274,11 @@ async function restoreCacheV2(
     core.debug(`Archive path: ${archivePath}`)
     core.debug(`Starting download of archive to: ${archivePath}`)
 
-    const downloadResponse = await downloadCacheFile(
+    await cacheHttpClient.downloadCache(
       response.signedDownloadUrl,
-      archivePath
+      archivePath,
+      options
     )
-    core.debug(`Download response status: ${downloadResponse._response.status}`)
 
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -422,7 +425,7 @@ async function saveCacheV1(
     }
 
     core.debug(`Saving Cache (ID: ${cacheId})`)
-    await cacheHttpClient.saveCache(cacheId, archivePath, options)
+    await cacheHttpClient.saveCache(cacheId, archivePath, '', options)
   } catch (error) {
     const typedError = error as Error
     if (typedError.name === ValidationError.name) {
@@ -445,12 +448,12 @@ async function saveCacheV1(
 /**
- * Save cache using the new Cache Service
+ * Save cache using Cache Service v2
  *
- * @param paths
- * @param key
- * @param options
- * @param enableCrossOsArchive
+ * @param paths a list of file paths to be cached
+ * @param key an explicit key for saving the cache
+ * @param options cache upload options
+ * @param enableCrossOsArchive an optional boolean enabled to save a cache on Windows that can be restored on any platform
  * @returns
  */
 async function saveCacheV2(
   paths: string[],
   key: string,
   options?: UploadOptions,
   enableCrossOsArchive = false
 ): Promise<number> {
+  // Override UploadOptions to force the use of Azure
+  // ...options goes first because we want to override the default values
+  // set in UploadOptions with these specific values
+  options = {
+    ...options,
+    uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
+    uploadConcurrency: 8, // 8 workers for parallel upload
+    useAzureSdk: true
+  }
   const compressionMethod = await utils.getCompressionMethod()
   const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
   let cacheId = -1
@@ -499,6 +511,9 @@ async function saveCacheV2(
     )
   }
 
+  // Set the archive size in the options; it will be used to display the upload progress
+  options.archiveSizeBytes = archiveFileSize
+
   core.debug('Reserving Cache')
   const version = utils.getCacheVersion(
     paths,
@@ -518,11 +533,12 @@ async function saveCacheV2(
   }
 
   core.debug(`Attempting to upload cache located at: ${archivePath}`)
-  const uploadResponse = await uploadCacheFile(
+  await cacheHttpClient.saveCache(
+    cacheId,
+    archivePath,
     response.signedUploadUrl,
-    archivePath
+    options
   )
-  core.debug(`Download response status: ${uploadResponse._response.status}`)
 
   const finalizeRequest: FinalizeCacheEntryUploadRequest = {
     key,
diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts
deleted file mode 100644
index e974cb2f..00000000
--- a/packages/cache/src/internal/blob/download-cache.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-import * as core from '@actions/core'
-
-import {
-  BlobClient,
-  BlockBlobClient,
-  BlobDownloadOptions,
-  BlobDownloadResponseParsed
-} from '@azure/storage-blob'
-
-export async function downloadCacheFile(
-  signedUploadURL: string,
-  archivePath: string
-): Promise<BlobDownloadResponseParsed> {
-  const downloadOptions: BlobDownloadOptions = {
-    maxRetryRequests: 5
-  }
-
-  const blobClient: BlobClient = new BlobClient(signedUploadURL)
-  const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
-
-  core.debug(
-    `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
-  )
-
-  return blockBlobClient.downloadToFile(
-    archivePath,
-    0,
-    undefined,
-    downloadOptions
-  )
-}
diff --git a/packages/cache/src/internal/blob/upload-cache.ts b/packages/cache/src/internal/blob/upload-cache.ts
deleted file mode 100644
index b9970c46..00000000
--- a/packages/cache/src/internal/blob/upload-cache.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-import * as core from '@actions/core'
-import {
-  BlobClient,
-  BlobUploadCommonResponse,
-  BlockBlobClient,
-  BlockBlobParallelUploadOptions
-} from '@azure/storage-blob'
-import {InvalidResponseError} from '../shared/errors'
-
-export async function uploadCacheFile(
-  signedUploadURL: string,
-  archivePath: string
-): Promise<BlobUploadCommonResponse> {
-  // Specify data transfer options
-  const uploadOptions: BlockBlobParallelUploadOptions = {
-    blockSize: 4 * 1024 * 1024, // 4 MiB max block size
-    concurrency: 4, // maximum number of parallel transfer workers
-    maxSingleShotSize: 8 * 1024 * 1024 // 8 MiB initial transfer size
-  }
-
-  const blobClient: BlobClient = new BlobClient(signedUploadURL)
-  const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
-
-  core.debug(
-    `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
-  )
-
-  const resp = await blockBlobClient.uploadFile(archivePath, uploadOptions)
-
-  if (resp._response.status >= 400) {
-    throw new InvalidResponseError(
-      `Upload failed with status code ${resp._response.status}`
-    )
-  }
-
-  return resp
-}
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index 051348ec..2470555b 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -8,6 +8,7 @@ import {
 import * as fs from 'fs'
 import {URL} from 'url'
 import * as utils from './cacheUtils'
+import {uploadCacheArchiveSDK} from './uploadUtils'
 import {
   ArtifactCacheEntry,
   InternalCacheOptions,
@@ -34,6 +35,7 @@ import {
   retryTypedResponse
 } from './requestUtils'
 import {getCacheServiceURL} from './config'
+import {getUserAgentString} from './shared/user-agent'
 
 function getCacheApiUrl(resource: string): string {
   const baseUrl: string = getCacheServiceURL()
@@ -65,7 +67,7 @@ function createHttpClient(): HttpClient {
   const bearerCredentialHandler = new BearerCredentialHandler(token)
 
   return new HttpClient(
-    'actions/cache',
+    getUserAgentString(),
     [bearerCredentialHandler],
    getRequestOptions()
   )
@@ -325,26 +327,45 @@ async function commitCache(
 export async function saveCache(
   cacheId: number,
   archivePath: string,
+  signedUploadURL?: string,
   options?: UploadOptions
 ): Promise<void> {
-  const httpClient = createHttpClient()
+  const uploadOptions = getUploadOptions(options)
 
-  core.debug('Upload cache')
-  await uploadFile(httpClient, cacheId, archivePath, options)
+  if (uploadOptions.useAzureSdk) {
+    // Use Azure storage SDK to upload caches directly to Azure
+    if (!signedUploadURL) {
+      throw new Error(
+        'Azure Storage SDK can only be used when a signed URL is provided.'
+      )
+    }
+    await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
+  } else {
+    const httpClient = createHttpClient()
 
-  // Commit Cache
-  core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
-  core.info(
-    `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
-  )
+    core.debug('Upload cache')
+    await uploadFile(httpClient, cacheId, archivePath, options)
 
-  const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
-  if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
-    throw new Error(
-      `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+    // Commit Cache
+    core.debug('Committing cache')
+    const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
+    core.info(
+      `Cache Size: ~${Math.round(
+        cacheSize / (1024 * 1024)
+      )} MB (${cacheSize} B)`
    )
-  }
 
-  core.info('Cache saved successfully')
+    const commitCacheResponse = await commitCache(
+      httpClient,
+      cacheId,
+      cacheSize
+    )
+    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+      throw new Error(
+        `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+      )
+    }
+
+    core.info('Cache saved successfully')
+  }
 }
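`saveCache` now branches on `useAzureSdk`, and a signed URL is mandatory on the Azure path (enforced by the `throw` above). A hedged sketch of the two call shapes, with illustrative paths and an assumed import from the built package layout:

```typescript
// Sketch (not part of the patch): the two upload paths saveCache selects between.
import {saveCache} from '@actions/cache/lib/internal/cacheHttpClient'

async function example(): Promise<void> {
  // v2 path: useAzureSdk requires a signed URL; cacheId is not used for the upload itself.
  await saveCache(
    -1,
    '/tmp/cache.tzst',
    'https://blob-storage.local?signed=true',
    {
      useAzureSdk: true,
      uploadChunkSize: 64 * 1024 * 1024,
      uploadConcurrency: 8
    }
  )

  // v1 path: no signed URL ('' is passed); the legacy uploadFile + commitCache flow runs.
  await saveCache(42, '/tmp/cache.tzst', '', {useAzureSdk: false})
}
```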
diff --git a/packages/cache/src/internal/uploadUtils.ts b/packages/cache/src/internal/uploadUtils.ts
new file mode 100644
index 00000000..1b4f7af0
--- /dev/null
+++ b/packages/cache/src/internal/uploadUtils.ts
@@ -0,0 +1,177 @@
+import * as core from '@actions/core'
+import {
+  BlobClient,
+  BlobUploadCommonResponse,
+  BlockBlobClient,
+  BlockBlobParallelUploadOptions
+} from '@azure/storage-blob'
+import {TransferProgressEvent} from '@azure/ms-rest-js'
+import {InvalidResponseError} from './shared/errors'
+import {UploadOptions} from '../options'
+
+/**
+ * Class for tracking the upload state and displaying stats.
+ */
+export class UploadProgress {
+  contentLength: number
+  sentBytes: number
+  startTime: number
+  displayedComplete: boolean
+  timeoutHandle?: ReturnType<typeof setTimeout>
+
+  constructor(contentLength: number) {
+    this.contentLength = contentLength
+    this.sentBytes = 0
+    this.displayedComplete = false
+    this.startTime = Date.now()
+  }
+
+  /**
+   * Sets the number of bytes sent
+   *
+   * @param sentBytes the number of bytes sent
+   */
+  setSentBytes(sentBytes: number): void {
+    this.sentBytes = sentBytes
+  }
+
+  /**
+   * Returns the total number of bytes transferred.
+   */
+  getTransferredBytes(): number {
+    return this.sentBytes
+  }
+
+  /**
+   * Returns true if the upload is complete.
+   */
+  isDone(): boolean {
+    return this.getTransferredBytes() === this.contentLength
+  }
+
+  /**
+   * Prints the current upload stats. Once the upload completes, this will print one
+   * last line and then stop.
+   */
+  display(): void {
+    if (this.displayedComplete) {
+      return
+    }
+
+    const transferredBytes = this.sentBytes
+    const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(
+      1
+    )
+    const elapsedTime = Date.now() - this.startTime
+    const uploadSpeed = (
+      transferredBytes /
+      (1024 * 1024) /
+      (elapsedTime / 1000)
+    ).toFixed(1)
+
+    core.info(
+      `Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MB/sec`
+    )
+
+    if (this.isDone()) {
+      this.displayedComplete = true
+    }
+  }
+
+  /**
+   * Returns a function used to handle TransferProgressEvents.
+   */
+  onProgress(): (progress: TransferProgressEvent) => void {
+    return (progress: TransferProgressEvent) => {
+      this.setSentBytes(progress.loadedBytes)
+    }
+  }
+
+  /**
+   * Starts the timer that displays the stats.
+   *
+   * @param delayInMs the delay between each write
+   */
+  startDisplayTimer(delayInMs = 1000): void {
+    const displayCallback = (): void => {
+      this.display()
+
+      if (!this.isDone()) {
+        this.timeoutHandle = setTimeout(displayCallback, delayInMs)
+      }
+    }
+
+    this.timeoutHandle = setTimeout(displayCallback, delayInMs)
+  }
+
+  /**
+   * Stops the timer that displays the stats. As this typically indicates the upload
+   * is complete, this will display one last line, unless the last line has already
+   * been written.
+   */
+  stopDisplayTimer(): void {
+    if (this.timeoutHandle) {
+      clearTimeout(this.timeoutHandle)
+      this.timeoutHandle = undefined
+    }
+
+    this.display()
+  }
+}
+
+/**
+ * Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
+ * This function will display progress information to the console. Concurrency of the
+ * upload is determined by the calling functions.
+ *
+ * @param signedUploadURL the signed URL to upload the archive to
+ * @param archivePath the path of the archive to upload
+ * @param options the upload options
+ * @returns the response from the upload
+ */
+export async function uploadCacheArchiveSDK(
+  signedUploadURL: string,
+  archivePath: string,
+  options?: UploadOptions
+): Promise<BlobUploadCommonResponse> {
+  const blobClient: BlobClient = new BlobClient(signedUploadURL)
+  const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
+  const uploadProgress = new UploadProgress(options?.archiveSizeBytes ?? 0)
+
+  // Specify data transfer options
+  const uploadOptions: BlockBlobParallelUploadOptions = {
+    blockSize: options?.uploadChunkSize,
+    concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers
+    maxSingleShotSize: 128 * 1024 * 1024, // 128 MiB initial transfer size
+    onProgress: uploadProgress.onProgress()
+  }
+
+  try {
+    uploadProgress.startDisplayTimer()
+
+    core.debug(
+      `BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
+    )
+
+    const response = await blockBlobClient.uploadFile(
+      archivePath,
+      uploadOptions
+    )
+
+    // TODO: better management of non-retryable errors
+    if (response._response.status >= 400) {
+      throw new InvalidResponseError(
+        `uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`
+      )
+    }
+
+    return response
+  } catch (error) {
+    core.warning(
+      `uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`
+    )
+    throw error
+  } finally {
+    uploadProgress.stopDisplayTimer()
+  }
+}
diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts
index d768ff54..3e4063f2 100644
--- a/packages/cache/src/options.ts
+++ b/packages/cache/src/options.ts
@@ -4,6 +4,14 @@ import * as core from '@actions/core'
  * Options to control cache upload
  */
 export interface UploadOptions {
+  /**
+   * Indicates whether to use the Azure Blob SDK to upload caches
+   * to Azure Blob Storage to improve reliability and
+   * performance
+   *
+   * @default false
+   */
+  useAzureSdk?: boolean
   /**
    * Number of parallel cache upload
    *
@@ -16,6 +24,10 @@ export interface UploadOptions {
    * @default 32MB
    */
   uploadChunkSize?: number
+  /**
+   * Archive size in bytes
+   */
+  archiveSizeBytes?: number
 }
 
 /**
@@ -76,12 +88,18 @@ export interface DownloadOptions {
  * @param copy the original upload options
 */
 export function getUploadOptions(copy?: UploadOptions): UploadOptions {
+  // Defaults if not overridden
   const result: UploadOptions = {
+    useAzureSdk: false,
     uploadConcurrency: 4,
     uploadChunkSize: 32 * 1024 * 1024
   }
 
   if (copy) {
+    if (typeof copy.useAzureSdk === 'boolean') {
+      result.useAzureSdk = copy.useAzureSdk
+    }
+
     if (typeof copy.uploadConcurrency === 'number') {
       result.uploadConcurrency = copy.uploadConcurrency
     }
@@ -91,6 +109,26 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
     }
   }
 
+  /**
+   * Add env var overrides
+   */
+  // Cap the uploadConcurrency at 32
+  result.uploadConcurrency = !isNaN(
+    Number(process.env['CACHE_UPLOAD_CONCURRENCY'])
+  )
+    ? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
+    : result.uploadConcurrency
+  // Cap the uploadChunkSize at 128MiB
+  result.uploadChunkSize = !isNaN(
+    Number(process.env['CACHE_UPLOAD_CHUNK_SIZE'])
+  )
+    ? Math.min(
+        128 * 1024 * 1024,
+        Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024
+      )
+    : result.uploadChunkSize
+
+  core.debug(`Use Azure SDK: ${result.useAzureSdk}`)
   core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
   core.debug(`Upload chunk size: ${result.uploadChunkSize}`)