From 39d19810a88675c2360d4949b352d94cc453827b Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Fri, 22 Nov 2024 09:01:59 -0800 Subject: [PATCH 1/9] Add restore tests --- .../cache/__tests__/restoreCacheV2.test.ts | 327 ++++++++++++++++++ packages/cache/src/cache.ts | 36 +- 2 files changed, 352 insertions(+), 11 deletions(-) create mode 100644 packages/cache/__tests__/restoreCacheV2.test.ts diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts new file mode 100644 index 00000000..87b2d1d0 --- /dev/null +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -0,0 +1,327 @@ +import * as core from '@actions/core' +import * as path from 'path' +import * as tar from '../src/internal/tar' +import * as config from '../src/internal/config' +import * as cacheUtils from '../src/internal/cacheUtils' +import * as cacheHttpClient from '../src/internal/cacheHttpClient' +import { restoreCache } from '../src/cache' +import { CacheFilename, CompressionMethod } from '../src/internal/constants' +import { ArtifactCacheEntry } from '../src/internal/contracts' +import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' + +jest.mock('../src/internal/cacheHttpClient') +jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/config') +jest.mock('../src/internal/tar') + +beforeAll(() => { + jest.spyOn(console, 'log').mockImplementation(() => { }) + jest.spyOn(core, 'debug').mockImplementation(() => { }) + jest.spyOn(core, 'info').mockImplementation(() => { }) + jest.spyOn(core, 'warning').mockImplementation(() => { }) + jest.spyOn(core, 'error').mockImplementation(() => { }) + + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) + + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') +}) + +test('restore with no path should fail', async () => { + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) +}) + +test('restore with too many keys should fail', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) +}) + +test('restore with large key should fail', async () => { + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) +}) + +test('restore with invalid key should fail', async () => { + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) +}) + +test('restore with no cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) + + const cacheKey = await restoreCache(paths, key) + + expect(cacheKey).toBe(undefined) +}) + +test('restore with 
server error should fail', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') + + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) + + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) +}) + +// test('restore with restore keys and no cache found', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' +// const restoreKey = 'node-' + +// jest +// .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') +// .mockImplementation(() => { +// return Promise.resolve(null) +// }) +// jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { +// return Promise.resolve(null) +// }) + +// const cacheKey = await restoreCache(paths, key, [restoreKey]) + +// expect(cacheKey).toBe(undefined) +// }) + +// test('restore with gzip compressed cache found', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: key, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) + +// const tempPath = '/foo/bar' + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// createTempDirectoryMock.mockImplementation(async () => { +// return Promise.resolve(tempPath) +// }) + +// const archivePath = path.join(tempPath, CacheFilename.Gzip) +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const fileSize = 142 +// const getArchiveFileSizeInBytesMock = jest +// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') +// .mockReturnValue(fileSize) + +// const extractTarMock = jest.spyOn(tar, 'extractTar') +// const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + +// const compression = CompressionMethod.Gzip +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key) + +// expect(cacheKey).toBe(key) +// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) +// expect(downloadCacheMock).toHaveBeenCalledWith( +// cacheEntry.archiveLocation, +// archivePath, +// undefined +// ) +// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + +// expect(extractTarMock).toHaveBeenCalledTimes(1) +// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + +// expect(unlinkFileMock).toHaveBeenCalledTimes(1) +// expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + +// expect(getCompressionMock).toHaveBeenCalledTimes(1) +// }) + +// test('restore with zstd compressed cache found', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' + +// const infoMock = jest.spyOn(core, 'info') + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: key, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = 
jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) +// const tempPath = '/foo/bar' + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// createTempDirectoryMock.mockImplementation(async () => { +// return Promise.resolve(tempPath) +// }) + +// const archivePath = path.join(tempPath, CacheFilename.Zstd) +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const fileSize = 62915000 +// const getArchiveFileSizeInBytesMock = jest +// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') +// .mockReturnValue(fileSize) + +// const extractTarMock = jest.spyOn(tar, 'extractTar') +// const compression = CompressionMethod.Zstd +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key) + +// expect(cacheKey).toBe(key) +// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) +// expect(downloadCacheMock).toHaveBeenCalledWith( +// cacheEntry.archiveLocation, +// archivePath, +// undefined +// ) +// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) +// expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + +// expect(extractTarMock).toHaveBeenCalledTimes(1) +// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) +// expect(getCompressionMock).toHaveBeenCalledTimes(1) +// }) + +// test('restore with cache found for restore key', async () => { +// const paths = ['node_modules'] +// const key = 'node-test' +// const restoreKey = 'node-' + +// const infoMock = jest.spyOn(core, 'info') + +// const cacheEntry: ArtifactCacheEntry = { +// cacheKey: restoreKey, +// scope: 'refs/heads/main', +// archiveLocation: 'www.actionscache.test/download' +// } +// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') +// getCacheMock.mockImplementation(async () => { +// return Promise.resolve(cacheEntry) +// }) +// const tempPath = '/foo/bar' + +// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') +// createTempDirectoryMock.mockImplementation(async () => { +// return Promise.resolve(tempPath) +// }) + +// const archivePath = path.join(tempPath, CacheFilename.Zstd) +// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + +// const fileSize = 142 +// const getArchiveFileSizeInBytesMock = jest +// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') +// .mockReturnValue(fileSize) + +// const extractTarMock = jest.spyOn(tar, 'extractTar') +// const compression = CompressionMethod.Zstd +// const getCompressionMock = jest +// .spyOn(cacheUtils, 'getCompressionMethod') +// .mockReturnValue(Promise.resolve(compression)) + +// const cacheKey = await restoreCache(paths, key, [restoreKey]) + +// expect(cacheKey).toBe(restoreKey) +// expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { +// compressionMethod: compression, +// enableCrossOsArchive: false +// }) +// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) +// expect(downloadCacheMock).toHaveBeenCalledWith( +// cacheEntry.archiveLocation, +// archivePath, +// undefined +// ) +// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) +// expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + +// 
expect(extractTarMock).toHaveBeenCalledTimes(1)
+//   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
+//   expect(getCompressionMock).toHaveBeenCalledTimes(1)
+// })
+
+// test('restore with dry run', async () => {
+//   const paths = ['node_modules']
+//   const key = 'node-test'
+//   const options = { lookupOnly: true }
+
+//   const cacheEntry: ArtifactCacheEntry = {
+//     cacheKey: key,
+//     scope: 'refs/heads/main',
+//     archiveLocation: 'www.actionscache.test/download'
+//   }
+//   const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+//   getCacheMock.mockImplementation(async () => {
+//     return Promise.resolve(cacheEntry)
+//   })
+
+//   const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+//   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+
+//   const compression = CompressionMethod.Gzip
+//   const getCompressionMock = jest
+//     .spyOn(cacheUtils, 'getCompressionMethod')
+//     .mockReturnValue(Promise.resolve(compression))
+
+//   const cacheKey = await restoreCache(paths, key, undefined, options)
+
+//   expect(cacheKey).toBe(key)
+//   expect(getCompressionMock).toHaveBeenCalledTimes(1)
+//   expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
+//     compressionMethod: compression,
+//     enableCrossOsArchive: false
+//   })
+//   // creating a tempDir and downloading the cache are skipped
+//   expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
+//   expect(downloadCacheMock).toHaveBeenCalledTimes(0)
+// })
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 1450c8ac..f9863b14 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,18 +3,18 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import {getCacheServiceVersion, isGhes} from './internal/config'
-import {DownloadOptions, UploadOptions} from './options'
-import {createTar, extractTar, listTar} from './internal/tar'
+import { getCacheServiceVersion, isGhes } from './internal/config'
+import { DownloadOptions, UploadOptions } from './options'
+import { createTar, extractTar, listTar } from './internal/tar'
 import {
   CreateCacheEntryRequest,
   FinalizeCacheEntryUploadRequest,
   FinalizeCacheEntryUploadResponse,
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
-import {CacheFileSizeLimit} from './internal/constants'
-import {uploadCacheFile} from './internal/blob/upload-cache'
-import {downloadCacheFile} from './internal/blob/download-cache'
+import { CacheFileSizeLimit } from './internal/constants'
+import { uploadCacheFile } from './internal/blob/upload-cache'
+import { downloadCacheFile } from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -287,7 +287,13 @@ async function restoreCacheV2(
     return request.key
   } catch (error) {
-    throw new Error(`Failed to restore: ${error.message}`)
+    const typedError = error as Error
+    if (typedError.name === ValidationError.name) {
+      throw error
+    } else {
+      // Suppress all non-validation cache related errors because caching should be optional
+      core.warning(`Failed to restore: ${(error as Error).message}`)
+    }
   } finally {
     try {
       if (archivePath) {
@@ -297,6 +303,8 @@
       core.debug(`Failed to delete archive: ${error}`)
     }
   }
+
+  return undefined
 }
 /**
@@ -397,9 +405,9 @@ async function saveCacheV1(
   } else if 
(reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( @@ -525,7 +533,13 @@ async function saveCacheV2( cacheId = parseInt(finalizeResponse.entryId) } catch (error) { const typedError = error as Error - core.warning(`Failed to save: ${typedError.message}`) + if (typedError.name === ValidationError.name) { + throw error + } else if (typedError.name === ReserveCacheError.name) { + core.info(`Failed to save: ${typedError.message}`) + } else { + core.warning(`Failed to save: ${typedError.message}`) + } } finally { // Try to delete the archive to save space try { From 4de30f744eb65b2f721d1a7993516d8c01c475d8 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 03:53:03 -0800 Subject: [PATCH 2/9] Add more tests for restoreCacheV2 --- .../cache/__tests__/restoreCacheV2.test.ts | 372 ++++++++++-------- packages/cache/src/cache.ts | 18 +- .../cache/src/internal/blob/download-cache.ts | 5 +- 3 files changed, 216 insertions(+), 179 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index 87b2d1d0..707432ca 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -3,11 +3,12 @@ import * as path from 'path' import * as tar from '../src/internal/tar' import * as config from '../src/internal/config' import * as cacheUtils from '../src/internal/cacheUtils' -import * as cacheHttpClient from '../src/internal/cacheHttpClient' -import { restoreCache } from '../src/cache' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' -import { ArtifactCacheEntry } from '../src/internal/contracts' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' +import * as downloadCacheModule from '../src/internal/blob/download-cache' +import {restoreCache} from '../src/cache' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' +import {BlobDownloadResponseParsed} from '@azure/storage-blob' +// import {executePromisesSequentially} from '@azure/ms-rest-js' jest.mock('../src/internal/cacheHttpClient') jest.mock('../src/internal/cacheUtils') @@ -15,222 +16,257 @@ jest.mock('../src/internal/config') jest.mock('../src/internal/tar') beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) - jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { - const actualUtils = jest.requireActual('../src/internal/cacheUtils') - return actualUtils.getCacheFileName(cm) - }) + 
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) - // Ensure that we're using v2 for these tests - jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') }) test('restore with no path should fail', async () => { - const paths: string[] = [] - const key = 'node-test' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Path Validation Error: At least one directory or file path is required` - ) + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) }) test('restore with too many keys should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = [...Array(20).keys()].map(x => x.toString()) - await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( - `Key Validation Error: Keys are limited to a maximum of 10.` - ) + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) }) test('restore with large key should fail', async () => { - const paths = ['node_modules'] - const key = 'foo'.repeat(512) // Over the 512 character limit - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot be larger than 512 characters.` - ) + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) }) test('restore with invalid key should fail', async () => { - const paths = ['node_modules'] - const key = 'comma,comma' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot contain commas.` - ) + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) }) test('restore with no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' + const paths = ['node_modules'] + const key = 'node-test' - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''})) - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) + expect(cacheKey).toBe(undefined) }) test('restore with server error should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockImplementation(() => { - throw new Error('HTTP Error Occurred') - }) + jest + 
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) - const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith( - 'Failed to restore: HTTP Error Occurred' - ) + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) }) -// test('restore with restore keys and no cache found', async () => { -// const paths = ['node_modules'] -// const key = 'node-test' -// const restoreKey = 'node-' +test('restore with restore keys and no cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKey = 'node-' + const logWarningMock = jest.spyOn(core, 'warning') -// jest -// .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') -// .mockImplementation(() => { -// return Promise.resolve(null) -// }) -// jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { -// return Promise.resolve(null) -// }) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''})) -// const cacheKey = await restoreCache(paths, key, [restoreKey]) + const cacheKey = await restoreCache(paths, key, [restoreKey]) -// expect(cacheKey).toBe(undefined) -// }) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledWith( + `Cache not found for keys: ${[key, restoreKey].join(', ')}` + ) +}) -// test('restore with gzip compressed cache found', async () => { -// const paths = ['node_modules'] -// const key = 'node-test' +test('restore with gzip compressed cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const logInfoMock = jest.spyOn(core, 'info') + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' -// const cacheEntry: ArtifactCacheEntry = { -// cacheKey: key, -// scope: 'refs/heads/main', -// archiveLocation: 'www.actionscache.test/download' -// } -// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') -// getCacheMock.mockImplementation(async () => { -// return Promise.resolve(cacheEntry) -// }) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) -// const tempPath = '/foo/bar' + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) -// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') -// createTempDirectoryMock.mockImplementation(async () => { -// return Promise.resolve(tempPath) -// }) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ok: true, signedDownloadUrl}) + ) -// const archivePath = path.join(tempPath, CacheFilename.Gzip) -// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + const tempPath = '/foo/bar' -// const fileSize = 142 -// const getArchiveFileSizeInBytesMock = jest -// .spyOn(cacheUtils, 
'getArchiveFileSizeInBytes') -// .mockReturnValue(fileSize) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) -// const extractTarMock = jest.spyOn(tar, 'extractTar') -// const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) -// const compression = CompressionMethod.Gzip -// const getCompressionMock = jest -// .spyOn(cacheUtils, 'getCompressionMethod') -// .mockReturnValue(Promise.resolve(compression)) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) -// const cacheKey = await restoreCache(paths, key) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') -// expect(cacheKey).toBe(key) -// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { -// compressionMethod: compression, -// enableCrossOsArchive: false -// }) -// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) -// expect(downloadCacheMock).toHaveBeenCalledWith( -// cacheEntry.archiveLocation, -// archivePath, -// undefined -// ) -// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + const cacheKey = await restoreCache(paths, key) -// expect(extractTarMock).toHaveBeenCalledTimes(1) -// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) -// expect(unlinkFileMock).toHaveBeenCalledTimes(1) -// expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) -// expect(getCompressionMock).toHaveBeenCalledTimes(1) -// }) + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) -// test('restore with zstd compressed cache found', async () => { -// const paths = ['node_modules'] -// const key = 'node-test' + expect(compressionMethodMock).toHaveBeenCalledTimes(1) +}) -// const infoMock = jest.spyOn(core, 'info') +test('restore with zstd compressed cache found', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const logInfoMock = jest.spyOn(core, 'info') + const compressionMethod = CompressionMethod.Zstd + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' -// const cacheEntry: ArtifactCacheEntry = { -// cacheKey: key, -// scope: 'refs/heads/main', -// archiveLocation: 'www.actionscache.test/download' -// } -// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') -// getCacheMock.mockImplementation(async () => { -// 
return Promise.resolve(cacheEntry) -// }) -// const tempPath = '/foo/bar' + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) -// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') -// createTempDirectoryMock.mockImplementation(async () => { -// return Promise.resolve(tempPath) -// }) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) -// const archivePath = path.join(tempPath, CacheFilename.Zstd) -// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ok: true, signedDownloadUrl}) + ) -// const fileSize = 62915000 -// const getArchiveFileSizeInBytesMock = jest -// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') -// .mockReturnValue(fileSize) + const tempPath = '/foo/bar' -// const extractTarMock = jest.spyOn(tar, 'extractTar') -// const compression = CompressionMethod.Zstd -// const getCompressionMock = jest -// .spyOn(cacheUtils, 'getCompressionMethod') -// .mockReturnValue(Promise.resolve(compression)) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) -// const cacheKey = await restoreCache(paths, key) + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) -// expect(cacheKey).toBe(key) -// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { -// compressionMethod: compression, -// enableCrossOsArchive: false -// }) -// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) -// expect(downloadCacheMock).toHaveBeenCalledWith( -// cacheEntry.archiveLocation, -// archivePath, -// undefined -// ) -// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) -// expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + const fileSize = 62915000 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) -// expect(extractTarMock).toHaveBeenCalledTimes(1) -// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) -// expect(getCompressionMock).toHaveBeenCalledTimes(1) -// }) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + + const cacheKey = await restoreCache(paths, key) + + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + 
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(compressionMethodMock).toHaveBeenCalledTimes(1) +}) // test('restore with cache found for restore key', async () => { // const paths = ['node_modules'] diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index f9863b14..07d6c7ce 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { uploadCacheFile } from './internal/blob/upload-cache' -import { downloadCacheFile } from './internal/blob/download-cache' +import {CacheFileSizeLimit} from './internal/constants' +import {uploadCacheFile} from './internal/blob/upload-cache' +import {downloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -405,9 +405,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
-        `Cache size of ~${Math.round(
-          archiveFileSize / (1024 * 1024)
-        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+      `Cache size of ~${Math.round(
+        archiveFileSize / (1024 * 1024)
+      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(
diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts
index 807c73a4..e974cb2f 100644
--- a/packages/cache/src/internal/blob/download-cache.ts
+++ b/packages/cache/src/internal/blob/download-cache.ts
@@ -3,13 +3,14 @@ import * as core from '@actions/core'
 import {
   BlobClient,
   BlockBlobClient,
-  BlobDownloadOptions
+  BlobDownloadOptions,
+  BlobDownloadResponseParsed
 } from '@azure/storage-blob'
 export async function downloadCacheFile(
   signedUploadURL: string,
   archivePath: string
-): Promise<{}> {
+): Promise<BlobDownloadResponseParsed> {
   const downloadOptions: BlobDownloadOptions = {
     maxRetryRequests: 5
   }

From 54ac2dd012c3e940fb0f8a5a425df857f02a73a4 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 04:08:47 -0800
Subject: [PATCH 3/9] Add cache service version debug message

---
 .../cache/__tests__/restoreCacheV2.test.ts | 578 ++++++++++--------
 packages/cache/src/cache.ts                |  22 +-
 2 files changed, 321 insertions(+), 279 deletions(-)

diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
index 707432ca..78b78aaa 100644
--- a/packages/cache/__tests__/restoreCacheV2.test.ts
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -4,10 +4,10 @@ import * as tar from '../src/internal/tar'
 import * as config from '../src/internal/config'
 import * as cacheUtils from '../src/internal/cacheUtils'
 import * as downloadCacheModule from '../src/internal/blob/download-cache'
-import {restoreCache} from '../src/cache'
-import {CacheFilename, CompressionMethod} from '../src/internal/constants'
-import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import {BlobDownloadResponseParsed} from '@azure/storage-blob'
+import { restoreCache } from '../src/cache'
+import { CacheFilename, CompressionMethod } from '../src/internal/constants'
+import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp'
+import { BlobDownloadResponseParsed } from '@azure/storage-blob'
 // import {executePromisesSequentially} from '@azure/ms-rest-js'
 jest.mock('../src/internal/cacheHttpClient')
 jest.mock('../src/internal/cacheUtils')
 jest.mock('../src/internal/config')
 jest.mock('../src/internal/tar')
+let logDebugMock: jest.SpyInstance
+let logInfoMock: jest.SpyInstance
+
 beforeAll(() => {
-  jest.spyOn(console, 'log').mockImplementation(() => {})
-  jest.spyOn(core, 'debug').mockImplementation(() => {})
-  jest.spyOn(core, 'info').mockImplementation(() => {})
-  jest.spyOn(core, 'warning').mockImplementation(() => {})
-  jest.spyOn(core, 'error').mockImplementation(() => {})
+  jest.spyOn(console, 'log').mockImplementation(() => { })
+  jest.spyOn(core, 'debug').mockImplementation(() => { })
+  jest.spyOn(core, 'info').mockImplementation(() => { })
+  jest.spyOn(core, 'warning').mockImplementation(() => { })
+  jest.spyOn(core, 'error').mockImplementation(() => { })
 
-  jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
-    const actualUtils = jest.requireActual('../src/internal/cacheUtils')
-    return actualUtils.getCacheFileName(cm)
-  })
+  jest.spyOn(cacheUtils, 
'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) - // Ensure that we're using v2 for these tests - jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + + logDebugMock = jest.spyOn(core, 'debug') + logInfoMock = jest.spyOn(core, 'info') +}) + +afterEach(() => { + expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') }) test('restore with no path should fail', async () => { - const paths: string[] = [] - const key = 'node-test' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Path Validation Error: At least one directory or file path is required` - ) + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) }) test('restore with too many keys should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = [...Array(20).keys()].map(x => x.toString()) - await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( - `Key Validation Error: Keys are limited to a maximum of 10.` - ) + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) }) test('restore with large key should fail', async () => { - const paths = ['node_modules'] - const key = 'foo'.repeat(512) // Over the 512 character limit - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot be larger than 512 characters.` - ) + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) }) test('restore with invalid key should fail', async () => { - const paths = ['node_modules'] - const key = 'comma,comma' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot contain commas.` - ) + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) }) test('restore with no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' + const paths = ['node_modules'] + const key = 'node-test' - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''})) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) + expect(cacheKey).toBe(undefined) }) test('restore with server error should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') - jest - 
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockImplementation(() => { - throw new Error('HTTP Error Occurred') - }) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) - const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith( - 'Failed to restore: HTTP Error Occurred' - ) + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) }) test('restore with restore keys and no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKey = 'node-' - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const restoreKey = 'node-' + const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue(Promise.resolve({ok: false, signedDownloadUrl: ''})) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) - const cacheKey = await restoreCache(paths, key, [restoreKey]) + const cacheKey = await restoreCache(paths, key, [restoreKey]) - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledWith( - `Cache not found for keys: ${[key, restoreKey].join(', ')}` - ) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledWith( + `Cache not found for keys: ${[key, restoreKey].join(', ')}` + ) }) test('restore with gzip compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logInfoMock = jest.spyOn(core, 'info') - const compressionMethod = CompressionMethod.Gzip - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const paths = ['node_modules'] + const key = 'node-test' + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ok: true, signedDownloadUrl}) - ) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ ok: true, signedDownloadUrl }) + ) - const tempPath = '/foo/bar' + const tempPath = '/foo/bar' - const 
createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) - }) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) - expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) test('restore with zstd compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logInfoMock = jest.spyOn(core, 'info') - const compressionMethod = CompressionMethod.Zstd - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' + const paths = ['node_modules'] + 
const key = 'node-test' + const compressionMethod = CompressionMethod.Zstd + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ok: true, signedDownloadUrl}) - ) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ ok: true, signedDownloadUrl }) + ) - const tempPath = '/foo/bar' + const tempPath = '/foo/bar' - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) - }) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const fileSize = 62915000 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const fileSize = 62915000 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + 
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) - expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) -// test('restore with cache found for restore key', async () => { -// const paths = ['node_modules'] -// const key = 'node-test' -// const restoreKey = 'node-' +test('restore with cache found for restore key', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const restoreKey = 'node-' + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' -// const infoMock = jest.spyOn(core, 'info') + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) -// const cacheEntry: ArtifactCacheEntry = { -// cacheKey: restoreKey, -// scope: 'refs/heads/main', -// archiveLocation: 'www.actionscache.test/download' -// } -// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') -// getCacheMock.mockImplementation(async () => { -// return Promise.resolve(cacheEntry) -// }) -// const tempPath = '/foo/bar' + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) -// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') -// createTempDirectoryMock.mockImplementation(async () => { -// return Promise.resolve(tempPath) -// }) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ ok: true, signedDownloadUrl }) + ) -// const archivePath = path.join(tempPath, CacheFilename.Zstd) -// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + const tempPath = '/foo/bar' -// const fileSize = 142 -// const getArchiveFileSizeInBytesMock = jest -// .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') -// .mockReturnValue(fileSize) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) -// const extractTarMock = jest.spyOn(tar, 'extractTar') -// const compression = CompressionMethod.Zstd -// const getCompressionMock = jest -// .spyOn(cacheUtils, 'getCompressionMethod') -// .mockReturnValue(Promise.resolve(compression)) + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) -// const cacheKey = await restoreCache(paths, key, 
[restoreKey]) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) -// expect(cacheKey).toBe(restoreKey) -// expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { -// compressionMethod: compression, -// enableCrossOsArchive: false -// }) -// expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) -// expect(downloadCacheMock).toHaveBeenCalledWith( -// cacheEntry.archiveLocation, -// archivePath, -// undefined -// ) -// expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) -// expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') -// expect(extractTarMock).toHaveBeenCalledTimes(1) -// expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) -// expect(getCompressionMock).toHaveBeenCalledTimes(1) -// }) + const cacheKey = await restoreCache(paths, key, [restoreKey]) -// test('restore with dry run', async () => { -// const paths = ['node_modules'] -// const key = 'node-test' -// const options = { lookupOnly: true } + expect(cacheKey).toBe(restoreKey) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: restoreKey, + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) -// const cacheEntry: ArtifactCacheEntry = { -// cacheKey: key, -// scope: 'refs/heads/main', -// archiveLocation: 'www.actionscache.test/download' -// } -// const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') -// getCacheMock.mockImplementation(async () => { -// return Promise.resolve(cacheEntry) -// }) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) -// const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') -// const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) -// const compression = CompressionMethod.Gzip -// const getCompressionMock = jest -// .spyOn(cacheUtils, 'getCompressionMethod') -// .mockReturnValue(Promise.resolve(compression)) + expect(compressionMethodMock).toHaveBeenCalledTimes(1) +}) -// const cacheKey = await restoreCache(paths, key, undefined, options) +test('restore with dry run', async () => { + const paths = ['node_modules'] + const key = 'node-test' + const options = { lookupOnly: true } + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' -// expect(cacheKey).toBe(key) -// expect(getCompressionMock).toHaveBeenCalledTimes(1) -// expect(getCacheMock).toHaveBeenCalledWith([key], paths, { -// compressionMethod: compression, -// enableCrossOsArchive: false -// }) -// // creating a tempDir and downloading the cache are skipped -// expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) -// expect(downloadCacheMock).toHaveBeenCalledTimes(0) -// }) + const 
getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) + + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ ok: true, signedDownloadUrl }) + ) + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + + const cacheKey = await restoreCache(paths, key, undefined, options) + + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download') + + // creating a tempDir and downloading the cache are skipped + expect(createTempDirectoryMock).toHaveBeenCalledTimes(0) + expect(downloadCacheFileMock).toHaveBeenCalledTimes(0) +}) diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 07d6c7ce..a2ce38f8 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import {getCacheServiceVersion, isGhes} from './internal/config' -import {DownloadOptions, UploadOptions} from './options' -import {createTar, extractTar, listTar} from './internal/tar' +import { getCacheServiceVersion, isGhes } from './internal/config' +import { DownloadOptions, UploadOptions } from './options' +import { createTar, extractTar, listTar } from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import {CacheFileSizeLimit} from './internal/constants' -import {uploadCacheFile} from './internal/blob/upload-cache' -import {downloadCacheFile} from './internal/blob/download-cache' +import { CacheFileSizeLimit } from './internal/constants' +import { uploadCacheFile } from './internal/blob/upload-cache' +import { downloadCacheFile } from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -79,9 +79,11 @@ export async function restoreCache( options?: DownloadOptions, enableCrossOsArchive = false ): Promise { + const cacheServiceVersion: string = getCacheServiceVersion() + core.debug(`Cache service version: ${cacheServiceVersion}`) + checkPaths(paths) - const cacheServiceVersion: string = getCacheServiceVersion() switch (cacheServiceVersion) { case 'v2': return await restoreCacheV2( @@ -405,9 +407,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( From 4dadd612d6e122c49c46883d9c83d6f88cd3c975 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 05:42:50 -0800 Subject: [PATCH 4/9] Add support for matching on restore key values --- .../cache/__tests__/restoreCacheV2.test.ts | 54 ++- packages/cache/src/cache.ts | 20 +- .../src/generated/results/api/v1/cache.ts | 342 ++---------------- .../results/entities/v1/cacheentry.ts | 163 +++++++++ 4 files changed, 249 insertions(+), 330 deletions(-) create mode 100644 packages/cache/src/generated/results/entities/v1/cacheentry.ts diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index 78b78aaa..f9fe0e9e 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -80,7 +80,13 @@ test('restore with no cache found', async () => { jest .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '', + matchedKey: '' + }) + ) const cacheKey = await restoreCache(paths, key) @@ -109,18 +115,24 @@ test('restore with server error should fail', async () => { test('restore with restore keys and no cache found', async () => { const paths = ['node_modules'] const key = 'node-test' - const restoreKey = 'node-' + const restoreKeys = ['node-'] const logWarningMock = jest.spyOn(core, 'warning') jest .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue(Promise.resolve({ ok: false, signedDownloadUrl: '' })) + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '', + matchedKey: '' + }) + ) - const cacheKey = await restoreCache(paths, key, [restoreKey]) + const cacheKey = await restoreCache(paths, key, restoreKeys) expect(cacheKey).toBe(undefined) expect(logWarningMock).toHaveBeenCalledWith( - `Cache not found for keys: ${[key, restoreKey].join(', ')}` + `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}` ) }) @@ -143,7 +155,11 @@ test('restore with gzip compressed cache found', async () => { 'GetCacheEntryDownloadURL' ) getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ ok: true, signedDownloadUrl }) + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key + }) ) const tempPath = '/foo/bar' @@ -219,7 +235,11 @@ test('restore with zstd compressed cache found', async () => { 'GetCacheEntryDownloadURL' ) getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ ok: true, signedDownloadUrl }) + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key + }) ) const tempPath = '/foo/bar' @@ -279,7 +299,7 @@ test('restore with zstd compressed cache found', async () => { test('restore with cache found for restore key', async () => { const paths = ['node_modules'] const key = 'node-test' - const restoreKey = 'node-' + const restoreKeys = ['node-'] const compressionMethod = CompressionMethod.Gzip const signedDownloadUrl = 'https://blob-storage.local?signed=true' const cacheVersion = @@ -296,7 +316,11 @@ test('restore with cache found for restore key', async () => { 
'GetCacheEntryDownloadURL' ) getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ ok: true, signedDownloadUrl }) + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: restoreKeys[0] + }) ) const tempPath = '/foo/bar' @@ -323,9 +347,9 @@ test('restore with cache found for restore key', async () => { const extractTarMock = jest.spyOn(tar, 'extractTar') const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - const cacheKey = await restoreCache(paths, key, [restoreKey]) + const cacheKey = await restoreCache(paths, key, restoreKeys) - expect(cacheKey).toBe(restoreKey) + expect(cacheKey).toBe(restoreKeys[0]) expect(getCacheVersionMock).toHaveBeenCalledWith( paths, compressionMethod, @@ -333,7 +357,7 @@ test('restore with cache found for restore key', async () => { ) expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ key, - restoreKeys: restoreKey, + restoreKeys: restoreKeys, version: cacheVersion }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) @@ -373,7 +397,11 @@ test('restore with dry run', async () => { 'GetCacheEntryDownloadURL' ) getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ ok: true, signedDownloadUrl }) + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key + }) ) const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index a2ce38f8..1f26e5ce 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -3,18 +3,18 @@ import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' import * as cacheTwirpClient from './internal/shared/cacheTwirpClient' -import { getCacheServiceVersion, isGhes } from './internal/config' -import { DownloadOptions, UploadOptions } from './options' -import { createTar, extractTar, listTar } from './internal/tar' +import {getCacheServiceVersion, isGhes} from './internal/config' +import {DownloadOptions, UploadOptions} from './options' +import {createTar, extractTar, listTar} from './internal/tar' import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest } from './generated/results/api/v1/cache' -import { CacheFileSizeLimit } from './internal/constants' -import { uploadCacheFile } from './internal/blob/upload-cache' -import { downloadCacheFile } from './internal/blob/download-cache' +import {CacheFileSizeLimit} from './internal/constants' +import {uploadCacheFile} from './internal/blob/upload-cache' +import {downloadCacheFile} from './internal/blob/download-cache' export class ValidationError extends Error { constructor(message: string) { super(message) @@ -287,7 +287,7 @@ async function restoreCacheV2( await extractTar(archivePath, compressionMethod) core.info('Cache restored successfully') - return request.key + return response.matchedKey } catch (error) { const typedError = error as Error if (typedError.name === ValidationError.name) { @@ -407,9 +407,9 @@ async function saveCacheV1( } else if (reserveCacheResponse?.statusCode === 400) { throw new Error( reserveCacheResponse?.error?.message ?? 
- `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.` ) } else { throw new ReserveCacheError( diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts index 0736c7ad..387bbd15 100644 --- a/packages/cache/src/generated/results/api/v1/cache.ts +++ b/packages/cache/src/generated/results/api/v1/cache.ts @@ -12,7 +12,7 @@ import type { PartialMessage } from "@protobuf-ts/runtime"; import { reflectionMergePartial } from "@protobuf-ts/runtime"; import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; import { MessageType } from "@protobuf-ts/runtime"; -import { Timestamp } from "../../../google/protobuf/timestamp"; +import { CacheEntry } from "../../entities/v1/cacheentry"; import { CacheMetadata } from "../../entities/v1/cachemetadata"; /** * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest @@ -139,6 +139,12 @@ export interface GetCacheEntryDownloadURLResponse { * @generated from protobuf field: string signed_download_url = 2; */ signedDownloadUrl: string; + /** + * Key or restore key that matches the lookup + * + * @generated from protobuf field: string matched_key = 3; + */ + matchedKey: string; } /** * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest @@ -200,62 +206,11 @@ export interface ListCacheEntriesRequest { */ export interface ListCacheEntriesResponse { /** - * @generated from protobuf field: repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; - */ - entries: ListCacheEntriesResponse_CacheEntry[]; -} -/** - * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry - */ -export interface ListCacheEntriesResponse_CacheEntry { - /** - * An explicit key for a cache entry + * Cache entries in the defined scope * - * @generated from protobuf field: string key = 1; + * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1; */ - key: string; - /** - * SHA256 hex digest of the cache archive - * - * @generated from protobuf field: string hash = 2; - */ - hash: string; - /** - * Cache entry size in bytes - * - * @generated from protobuf field: int64 size_bytes = 3; - */ - sizeBytes: string; - /** - * Access scope - * - * @generated from protobuf field: string scope = 4; - */ - scope: string; - /** - * Version SHA256 hex digest - * - * @generated from protobuf field: string version = 5; - */ - version: string; - /** - * When the cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; - /** - * When the cache entry was last accessed - * - * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; - */ - lastAccessedAt?: Timestamp; - /** - * When the cache entry is set to expire - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; - */ - expiresAt?: Timestamp; + entries: CacheEntry[]; } /** * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest @@ -296,61 +251,12 @@ export interface LookupCacheEntryResponse { * @generated from protobuf field: bool exists = 1; */ exists: boolean; -} -/** - * Matched cache entry metadata - * - * @generated from protobuf 
message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry - */ -export interface LookupCacheEntryResponse_CacheEntry { /** - * An explicit key for a cache entry + * Matched cache entry metadata * - * @generated from protobuf field: string key = 1; + * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2; */ - key: string; - /** - * SHA256 hex digest of the cache archive - * - * @generated from protobuf field: string hash = 2; - */ - hash: string; - /** - * Cache entry size in bytes - * - * @generated from protobuf field: int64 size_bytes = 3; - */ - sizeBytes: string; - /** - * Access scope - * - * @generated from protobuf field: string scope = 4; - */ - scope: string; - /** - * Version SHA256 hex digest - * - * @generated from protobuf field: string version = 5; - */ - version: string; - /** - * When the cache entry was created - * - * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; - */ - createdAt?: Timestamp; - /** - * When the cache entry was last accessed - * - * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; - */ - lastAccessedAt?: Timestamp; - /** - * When the cache entry is set to expire - * - * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; - */ - expiresAt?: Timestamp; + entry?: CacheEntry; } // @generated message type with reflection information, may provide speed optimized methods class CreateCacheEntryRequest$Type extends MessageType { @@ -662,11 +568,12 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType): GetCacheEntryDownloadURLResponse { - const message = { ok: false, signedDownloadUrl: "" }; + const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) reflectionMergePartial(this, message, value); @@ -683,6 +590,9 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType { constructor() { super("github.actions.results.api.v1.ListCacheEntriesResponse", [ - { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListCacheEntriesResponse_CacheEntry } + { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheEntry } ]); } create(value?: PartialMessage): ListCacheEntriesResponse { @@ -899,8 +812,8 @@ class ListCacheEntriesResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, - { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, - { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, - { no: 8, name: "expires_at", kind: "message", T: () => Timestamp } - ]); - } - create(value?: PartialMessage): ListCacheEntriesResponse_CacheEntry { - const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, 
options: BinaryReadOptions, target?: ListCacheEntriesResponse_CacheEntry): ListCacheEntriesResponse_CacheEntry { - let message = target ?? this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string hash */ 2: - message.hash = reader.string(); - break; - case /* int64 size_bytes */ 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string scope */ 4: - message.scope = reader.string(); - break; - case /* string version */ 5: - message.version = reader.string(); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - case /* google.protobuf.Timestamp last_accessed_at */ 7: - message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); - break; - case /* google.protobuf.Timestamp expires_at */ 8: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: ListCacheEntriesResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string hash = 2; */ - if (message.hash !== "") - writer.tag(2, WireType.LengthDelimited).string(message.hash); - /* int64 size_bytes = 3; */ - if (message.sizeBytes !== "0") - writer.tag(3, WireType.Varint).int64(message.sizeBytes); - /* string scope = 4; */ - if (message.scope !== "") - writer.tag(4, WireType.LengthDelimited).string(message.scope); - /* string version = 5; */ - if (message.version !== "") - writer.tag(5, WireType.LengthDelimited).string(message.version); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp last_accessed_at = 7; */ - if (message.lastAccessedAt) - Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp expires_at = 8; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry - */ -export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type(); -// @generated message type with reflection information, may provide speed optimized methods class LookupCacheEntryRequest$Type extends MessageType { constructor() { super("github.actions.results.api.v1.LookupCacheEntryRequest", [ @@ -1095,7 +912,8 @@ export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); class LookupCacheEntryResponse$Type extends MessageType { constructor() { super("github.actions.results.api.v1.LookupCacheEntryResponse", [ - { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 2, name: "entry", kind: "message", T: () => CacheEntry } ]); } create(value?: PartialMessage): LookupCacheEntryResponse { @@ -1113,6 +931,9 @@ class LookupCacheEntryResponse$Type extends MessageType { - constructor() { - super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [ - { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, - { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, - { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, - { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, - { no: 8, name: "expires_at", kind: "message", T: () => Timestamp } - ]); - } - create(value?: PartialMessage): LookupCacheEntryResponse_CacheEntry { - const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; - globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== undefined) - reflectionMergePartial(this, message, value); - return message; - } - internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse_CacheEntry): LookupCacheEntryResponse_CacheEntry { - let message = target ?? 
this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ 1: - message.key = reader.string(); - break; - case /* string hash */ 2: - message.hash = reader.string(); - break; - case /* int64 size_bytes */ 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string scope */ 4: - message.scope = reader.string(); - break; - case /* string version */ 5: - message.version = reader.string(); - break; - case /* google.protobuf.Timestamp created_at */ 6: - message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - case /* google.protobuf.Timestamp last_accessed_at */ 7: - message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); - break; - case /* google.protobuf.Timestamp expires_at */ 8: - message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message: LookupCacheEntryResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { - /* string key = 1; */ - if (message.key !== "") - writer.tag(1, WireType.LengthDelimited).string(message.key); - /* string hash = 2; */ - if (message.hash !== "") - writer.tag(2, WireType.LengthDelimited).string(message.hash); - /* int64 size_bytes = 3; */ - if (message.sizeBytes !== "0") - writer.tag(3, WireType.Varint).int64(message.sizeBytes); - /* string scope = 4; */ - if (message.scope !== "") - writer.tag(4, WireType.LengthDelimited).string(message.scope); - /* string version = 5; */ - if (message.version !== "") - writer.tag(5, WireType.LengthDelimited).string(message.version); - /* google.protobuf.Timestamp created_at = 6; */ - if (message.createdAt) - Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp last_accessed_at = 7; */ - if (message.lastAccessedAt) - Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); - /* google.protobuf.Timestamp expires_at = 8; */ - if (message.expiresAt) - Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } -} -/** - * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry - */ -export const LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type(); /** * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService */ diff --git a/packages/cache/src/generated/results/entities/v1/cacheentry.ts b/packages/cache/src/generated/results/entities/v1/cacheentry.ts new file mode 100644 index 00000000..b55b4afa --- /dev/null +++ b/packages/cache/src/generated/results/entities/v1/cacheentry.ts @@ -0,0 +1,163 @@ +// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies +// @generated from protobuf file "results/entities/v1/cacheentry.proto" (package "github.actions.results.entities.v1", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +/** + * @generated from protobuf message github.actions.results.entities.v1.CacheEntry + */ +export interface CacheEntry { + /** + * An explicit key for a cache entry + * + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * SHA256 hex digest of the cache archive + * + * @generated from protobuf field: string hash = 2; + */ + hash: string; + /** + * Cache entry size in bytes + * + * @generated from protobuf field: int64 size_bytes = 3; + */ + sizeBytes: string; + /** + * Access scope + * + * @generated from protobuf field: string scope = 4; + */ + scope: string; + /** + * Version SHA256 hex digest + * + * @generated from protobuf field: string version = 5; + */ + version: string; + /** + * When the cache entry was created + * + * @generated from protobuf field: google.protobuf.Timestamp created_at = 6; + */ + createdAt?: Timestamp; + /** + * When the cache entry was last accessed + * + * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7; + */ + lastAccessedAt?: Timestamp; + /** + * When the cache entry is set to expire + * + * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8; + */ + expiresAt?: Timestamp; +} +// @generated message type with reflection information, may provide speed optimized methods +class CacheEntry$Type extends MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheEntry", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, + { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 6, name: "created_at", kind: "message", T: () => Timestamp }, + { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp }, + { no: 8, 
name: "expires_at", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): CacheEntry { + const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string hash */ 2: + message.hash = reader.string(); + break; + case /* int64 size_bytes */ 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string scope */ 4: + message.scope = reader.string(); + break; + case /* string version */ 5: + message.version = reader.string(); + break; + case /* google.protobuf.Timestamp created_at */ 6: + message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + case /* google.protobuf.Timestamp last_accessed_at */ 7: + message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); + break; + case /* google.protobuf.Timestamp expires_at */ 8: + message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string hash = 2; */ + if (message.hash !== "") + writer.tag(2, WireType.LengthDelimited).string(message.hash); + /* int64 size_bytes = 3; */ + if (message.sizeBytes !== "0") + writer.tag(3, WireType.Varint).int64(message.sizeBytes); + /* string scope = 4; */ + if (message.scope !== "") + writer.tag(4, WireType.LengthDelimited).string(message.scope); + /* string version = 5; */ + if (message.version !== "") + writer.tag(5, WireType.LengthDelimited).string(message.version); + /* google.protobuf.Timestamp created_at = 6; */ + if (message.createdAt) + Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp last_accessed_at = 7; */ + if (message.lastAccessedAt) + Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Timestamp expires_at = 8; */ + if (message.expiresAt) + Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry + */ +export const CacheEntry = new CacheEntry$Type(); From de236da416f84474a98d0ac6e9ad35dd13314552 Mon Sep 17 00:00:00 2001 From: Bassem Dghaidi <568794+Link-@users.noreply.github.com> Date: Mon, 25 Nov 2024 05:47:51 -0800 Subject: [PATCH 5/9] Fix cache lookup scenario --- .../cache/__tests__/restoreCacheV2.test.ts | 652 +++++++++--------- packages/cache/src/cache.ts | 2 +- 2 files changed, 327 insertions(+), 327 deletions(-) diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts index f9fe0e9e..c74d7fab 100644 --- a/packages/cache/__tests__/restoreCacheV2.test.ts +++ b/packages/cache/__tests__/restoreCacheV2.test.ts @@ -4,10 +4,10 @@ import * as tar from '../src/internal/tar' import * as config from '../src/internal/config' import * as cacheUtils from '../src/internal/cacheUtils' import * as downloadCacheModule from '../src/internal/blob/download-cache' -import { restoreCache } from '../src/cache' -import { CacheFilename, CompressionMethod } from '../src/internal/constants' -import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp' -import { BlobDownloadResponseParsed } from '@azure/storage-blob' +import {restoreCache} from '../src/cache' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp' +import {BlobDownloadResponseParsed} from '@azure/storage-blob' // import {executePromisesSequentially} from '@azure/ms-rest-js' jest.mock('../src/internal/cacheHttpClient') @@ -19,413 +19,413 @@ let logDebugMock: jest.SpyInstance let logInfoMock: jest.SpyInstance beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => { }) - jest.spyOn(core, 'debug').mockImplementation(() => { }) - jest.spyOn(core, 'info').mockImplementation(() => { }) - jest.spyOn(core, 'warning').mockImplementation(() => { }) - jest.spyOn(core, 'error').mockImplementation(() => { }) + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) - jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { - const actualUtils = jest.requireActual('../src/internal/cacheUtils') - return actualUtils.getCacheFileName(cm) - }) + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) - // Ensure that we're using v2 for these tests - jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') + // Ensure that we're using v2 for these tests + jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2') - logDebugMock = jest.spyOn(core, 'debug') - logInfoMock = jest.spyOn(core, 'info') + logDebugMock = jest.spyOn(core, 'debug') + logInfoMock = jest.spyOn(core, 'info') }) afterEach(() => { - expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') + expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2') }) test('restore with no path should fail', async () => { - const paths: string[] = [] - const key = 'node-test' - await expect(restoreCache(paths, 
key)).rejects.toThrowError( - `Path Validation Error: At least one directory or file path is required` - ) + const paths: string[] = [] + const key = 'node-test' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` + ) }) test('restore with too many keys should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = [...Array(20).keys()].map(x => x.toString()) - await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( - `Key Validation Error: Keys are limited to a maximum of 10.` - ) + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) }) test('restore with large key should fail', async () => { - const paths = ['node_modules'] - const key = 'foo'.repeat(512) // Over the 512 character limit - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot be larger than 512 characters.` - ) + const paths = ['node_modules'] + const key = 'foo'.repeat(512) // Over the 512 character limit + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) }) test('restore with invalid key should fail', async () => { - const paths = ['node_modules'] - const key = 'comma,comma' - await expect(restoreCache(paths, key)).rejects.toThrowError( - `Key Validation Error: ${key} cannot contain commas.` - ) + const paths = ['node_modules'] + const key = 'comma,comma' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Key Validation Error: ${key} cannot contain commas.` + ) }) test('restore with no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' + const paths = ['node_modules'] + const key = 'node-test' - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue( - Promise.resolve({ - ok: false, - signedDownloadUrl: '', - matchedKey: '' - }) - ) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '', + matchedKey: '' + }) + ) - const cacheKey = await restoreCache(paths, key) + const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) + expect(cacheKey).toBe(undefined) }) test('restore with server error should fail', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockImplementation(() => { - throw new Error('HTTP Error Occurred') - }) + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) - const cacheKey = await restoreCache(paths, key) - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith( - 'Failed to restore: HTTP Error Occurred' - ) + const cacheKey = await restoreCache(paths, key) + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledTimes(1) + 
expect(logWarningMock).toHaveBeenCalledWith( + 'Failed to restore: HTTP Error Occurred' + ) }) test('restore with restore keys and no cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = ['node-'] - const logWarningMock = jest.spyOn(core, 'warning') + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = ['node-'] + const logWarningMock = jest.spyOn(core, 'warning') - jest - .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') - .mockReturnValue( - Promise.resolve({ - ok: false, - signedDownloadUrl: '', - matchedKey: '' - }) - ) - - const cacheKey = await restoreCache(paths, key, restoreKeys) - - expect(cacheKey).toBe(undefined) - expect(logWarningMock).toHaveBeenCalledWith( - `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}` + jest + .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL') + .mockReturnValue( + Promise.resolve({ + ok: false, + signedDownloadUrl: '', + matchedKey: '' + }) ) + + const cacheKey = await restoreCache(paths, key, restoreKeys) + + expect(cacheKey).toBe(undefined) + expect(logWarningMock).toHaveBeenCalledWith( + `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}` + ) }) test('restore with gzip compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const compressionMethod = CompressionMethod.Gzip - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' + const paths = ['node_modules'] + const key = 'node-test' + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: key - }) - ) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key }) + ) - const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const tempPath = '/foo/bar' - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const createTempDirectoryMock 
= jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const cacheKey = await restoreCache(paths, key) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + const cacheKey = await restoreCache(paths, key) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) - expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) test('restore with zstd compressed cache found', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const compressionMethod = CompressionMethod.Zstd - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' + const paths = ['node_modules'] + const key = 'node-test' + const compressionMethod = CompressionMethod.Zstd + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + 
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: key - }) - ) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: key }) + ) - const archivePath = path.join(tempPath, CacheFilename.Zstd) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const tempPath = '/foo/bar' - const fileSize = 62915000 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const cacheKey = await restoreCache(paths, key) + const fileSize = 62915000 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - expect(cacheKey).toBe(key) - expect(getCacheVersionMock).toHaveBeenCalledWith( - paths, - compressionMethod, - false - ) - expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ - key, - restoreKeys: [], - version: cacheVersion - }) - expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) - expect(downloadCacheFileMock).toHaveBeenCalledWith( - signedDownloadUrl, - archivePath - ) - expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) - expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') - expect(extractTarMock).toHaveBeenCalledTimes(1) - expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + const cacheKey = await restoreCache(paths, key) - expect(unlinkFileMock).toHaveBeenCalledTimes(1) - expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + expect(cacheKey).toBe(key) + expect(getCacheVersionMock).toHaveBeenCalledWith( + paths, + compressionMethod, + false + ) + expect(getCacheDownloadURLMock).toHaveBeenCalledWith({ + key, + restoreKeys: [], + version: cacheVersion + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheFileMock).toHaveBeenCalledWith( + signedDownloadUrl, + archivePath + ) + expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath) + expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) - 
expect(compressionMethodMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(compressionMethodMock).toHaveBeenCalledTimes(1) }) test('restore with cache found for restore key', async () => { - const paths = ['node_modules'] - const key = 'node-test' - const restoreKeys = ['node-'] - const compressionMethod = CompressionMethod.Gzip - const signedDownloadUrl = 'https://blob-storage.local?signed=true' - const cacheVersion = - 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' + const paths = ['node_modules'] + const key = 'node-test' + const restoreKeys = ['node-'] + const compressionMethod = CompressionMethod.Gzip + const signedDownloadUrl = 'https://blob-storage.local?signed=true' + const cacheVersion = + 'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed' - const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') - getCacheVersionMock.mockReturnValue(cacheVersion) + const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion') + getCacheVersionMock.mockReturnValue(cacheVersion) - const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') - compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) + const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod') + compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod)) - const getCacheDownloadURLMock = jest.spyOn( - CacheServiceClientJSON.prototype, - 'GetCacheEntryDownloadURL' - ) - getCacheDownloadURLMock.mockReturnValue( - Promise.resolve({ - ok: true, - signedDownloadUrl, - matchedKey: restoreKeys[0] - }) - ) - - const tempPath = '/foo/bar' - - const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') - createTempDirectoryMock.mockImplementation(async () => { - return Promise.resolve(tempPath) + const getCacheDownloadURLMock = jest.spyOn( + CacheServiceClientJSON.prototype, + 'GetCacheEntryDownloadURL' + ) + getCacheDownloadURLMock.mockReturnValue( + Promise.resolve({ + ok: true, + signedDownloadUrl, + matchedKey: restoreKeys[0] }) + ) - const archivePath = path.join(tempPath, CacheFilename.Gzip) - const downloadCacheFileMock = jest.spyOn( - downloadCacheModule, - 'downloadCacheFile' - ) - downloadCacheFileMock.mockReturnValue( - Promise.resolve({} as BlobDownloadResponseParsed) - ) + const tempPath = '/foo/bar' - const fileSize = 142 - const getArchiveFileSizeInBytesMock = jest - .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') - .mockReturnValue(fileSize) + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) - const extractTarMock = jest.spyOn(tar, 'extractTar') - const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheFileMock = jest.spyOn( + downloadCacheModule, + 'downloadCacheFile' + ) + downloadCacheFileMock.mockReturnValue( + Promise.resolve({} as BlobDownloadResponseParsed) + ) - const cacheKey = await restoreCache(paths, key, restoreKeys) + const fileSize = 142 + const getArchiveFileSizeInBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeInBytes') + .mockReturnValue(fileSize) - expect(cacheKey).toBe(restoreKeys[0]) - 
expect(getCacheVersionMock).toHaveBeenCalledWith(
-      paths,
-      compressionMethod,
-      false
-    )
-    expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
-      key,
-      restoreKeys: restoreKeys,
-      version: cacheVersion
-    })
-    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
-    expect(downloadCacheFileMock).toHaveBeenCalledWith(
-      signedDownloadUrl,
-      archivePath
-    )
-    expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
-    expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
+  const extractTarMock = jest.spyOn(tar, 'extractTar')
+  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
-    expect(extractTarMock).toHaveBeenCalledTimes(1)
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+  const cacheKey = await restoreCache(paths, key, restoreKeys)
-    expect(unlinkFileMock).toHaveBeenCalledTimes(1)
-    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+  expect(cacheKey).toBe(restoreKeys[0])
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys,
+    version: cacheVersion
+  })
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+    signedDownloadUrl,
+    archivePath
+  )
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
-    expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+
+  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
 })
 
 test('restore with dry run', async () => {
-    const paths = ['node_modules']
-    const key = 'node-test'
-    const options = { lookupOnly: true }
-    const compressionMethod = CompressionMethod.Gzip
-    const signedDownloadUrl = 'https://blob-storage.local?signed=true'
-    const cacheVersion =
-      'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const options = {lookupOnly: true}
+  const compressionMethod = CompressionMethod.Gzip
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
 
-    const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
-    getCacheVersionMock.mockReturnValue(cacheVersion)
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
 
-    const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
-    compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
 
-    const getCacheDownloadURLMock = jest.spyOn(
-      CacheServiceClientJSON.prototype,
-      'GetCacheEntryDownloadURL'
-    )
-    getCacheDownloadURLMock.mockReturnValue(
-      Promise.resolve({
-        ok: true,
-        signedDownloadUrl,
-        matchedKey: key
-      })
-    )
-
-    const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
-    const downloadCacheFileMock = jest.spyOn(
-      downloadCacheModule,
-      'downloadCacheFile'
-    )
-
-    const cacheKey = await restoreCache(paths, key, undefined, options)
-
-    expect(cacheKey).toBe(key)
-    expect(getCacheVersionMock).toHaveBeenCalledWith(
-      paths,
-      compressionMethod,
-      false
-    )
-    expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
-      key,
-      restoreKeys: [],
-      version: cacheVersion
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({
+      ok: true,
+      signedDownloadUrl,
+      matchedKey: key
     })
-    expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')
+  )
-    // creating a tempDir and downloading the cache are skipped
-    expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
-    expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+
+  const cacheKey = await restoreCache(paths, key, undefined, options)
+
+  expect(cacheKey).toBe(key)
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys: [],
+    version: cacheVersion
+  })
+  expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')
+
+  // creating a tempDir and downloading the cache are skipped
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
+  expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
 })
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 1f26e5ce..0f8f370d 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -261,7 +261,7 @@ async function restoreCacheV2(
 
     if (options?.lookupOnly) {
      core.info('Lookup only - skipping download')
-      return request.key
+      return response.matchedKey
     }
 
     archivePath = path.join(

From 2d2513915c0f108e65ece5b165edf195bccfa73b Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 16:13:20 +0100
Subject: [PATCH 6/9] Remove unused package

Co-authored-by: Rob Herley
---
 packages/cache/__tests__/restoreCacheV2.test.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
index c74d7fab..46a1ee0f 100644
--- a/packages/cache/__tests__/restoreCacheV2.test.ts
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -8,7 +8,6 @@ import {restoreCache} from '../src/cache'
 import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
 import {BlobDownloadResponseParsed} from '@azure/storage-blob'
-// import {executePromisesSequentially} from '@azure/ms-rest-js'
 
 jest.mock('../src/internal/cacheHttpClient')
 jest.mock('../src/internal/cacheUtils')

From 0e321b26f42796370493a2863297be202d41d673 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 07:34:07 -0800
Subject: [PATCH 7/9] Add the download cache file status code to debug log

---
 packages/cache/src/cache.ts | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 0f8f370d..ca3b844f 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,18 +3,18 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import {getCacheServiceVersion, isGhes} from './internal/config'
-import {DownloadOptions, UploadOptions} from './options'
-import {createTar, extractTar, listTar} from './internal/tar'
+import { getCacheServiceVersion, isGhes } from './internal/config'
+import { DownloadOptions, UploadOptions } from './options'
+import { createTar, extractTar, listTar } from './internal/tar'
 import {
   CreateCacheEntryRequest,
   FinalizeCacheEntryUploadRequest,
   FinalizeCacheEntryUploadResponse,
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
-import {CacheFileSizeLimit} from './internal/constants'
-import {uploadCacheFile} from './internal/blob/upload-cache'
-import {downloadCacheFile} from './internal/blob/download-cache'
+import { CacheFileSizeLimit } from './internal/constants'
+import { uploadCacheFile } from './internal/blob/upload-cache'
+import { downloadCacheFile } from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -271,7 +271,8 @@ async function restoreCacheV2(
     core.debug(`Archive path: ${archivePath}`)
     core.debug(`Starting download of archive to: ${archivePath}`)
 
-    await downloadCacheFile(response.signedDownloadUrl, archivePath)
+    const downloadResponse = await downloadCacheFile(response.signedDownloadUrl, archivePath)
+    core.debug(`Download response status: ${downloadResponse._response.status}`)
 
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -407,9 +408,9 @@ async function saveCacheV1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
-        `Cache size of ~${Math.round(
-          archiveFileSize / (1024 * 1024)
-        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+      `Cache size of ~${Math.round(
+        archiveFileSize / (1024 * 1024)
+      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(

From 4d31e1048ae67c6b145618fa34b92aad57ab340a Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 07:34:52 -0800
Subject: [PATCH 8/9] Add the download cache file status code to debug log

---
 packages/cache/src/cache.ts | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index ca3b844f..8b7a8d02 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,18 +3,18 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
-import { getCacheServiceVersion, isGhes } from './internal/config'
-import { DownloadOptions, UploadOptions } from './options'
-import { createTar, extractTar, listTar } from './internal/tar'
+import {getCacheServiceVersion, isGhes} from './internal/config'
+import {DownloadOptions, UploadOptions} from './options'
+import {createTar, extractTar, listTar} from './internal/tar'
 import {
   CreateCacheEntryRequest,
   FinalizeCacheEntryUploadRequest,
   FinalizeCacheEntryUploadResponse,
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
-import { CacheFileSizeLimit } from './internal/constants'
-import { uploadCacheFile } from './internal/blob/upload-cache'
-import { downloadCacheFile } from './internal/blob/download-cache'
+import {CacheFileSizeLimit} from './internal/constants'
+import {uploadCacheFile} from './internal/blob/upload-cache'
+import {downloadCacheFile} from './internal/blob/download-cache'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
@@ -271,7 +271,10 @@ async function restoreCacheV2(
     core.debug(`Archive path: ${archivePath}`)
     core.debug(`Starting download of archive to: ${archivePath}`)
 
-    const downloadResponse = await downloadCacheFile(response.signedDownloadUrl, archivePath)
+    const downloadResponse = await downloadCacheFile(
+      response.signedDownloadUrl,
+      archivePath
+    )
     core.debug(`Download response status: ${downloadResponse._response.status}`)
 
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -408,9 +411,9 @@ async function saveCacheV1(
   } else if (reserveCacheResponse?.statusCode === 400) {
     throw new Error(
       reserveCacheResponse?.error?.message ??
-      `Cache size of ~${Math.round(
-        archiveFileSize / (1024 * 1024)
-      )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
+        `Cache size of ~${Math.round(
+          archiveFileSize / (1024 * 1024)
+        )} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`
     )
   } else {
     throw new ReserveCacheError(

From 35ede8fcf0bc19ecfa7d038ccb54132ed132b301 Mon Sep 17 00:00:00 2001
From: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Date: Mon, 25 Nov 2024 12:08:07 -0800
Subject: [PATCH 9/9] Add a new debug message for downloads

---
 .../cache/__tests__/restoreCacheV2.test.ts | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
index 46a1ee0f..cc4f9e3c 100644
--- a/packages/cache/__tests__/restoreCacheV2.test.ts
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -174,7 +174,11 @@ test('restore with gzip compressed cache found', async () => {
     'downloadCacheFile'
   )
   downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({} as BlobDownloadResponseParsed)
+    Promise.resolve({
+      _response: {
+        status: 200
+      }
+    } as BlobDownloadResponseParsed)
   )
 
   const fileSize = 142
@@ -254,7 +258,11 @@ test('restore with zstd compressed cache found', async () => {
     'downloadCacheFile'
   )
   downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({} as BlobDownloadResponseParsed)
+    Promise.resolve({
+      _response: {
+        status: 200
+      }
+    } as BlobDownloadResponseParsed)
   )
 
   const fileSize = 62915000
@@ -335,7 +343,11 @@ test('restore with cache found for restore key', async () => {
     'downloadCacheFile'
   )
   downloadCacheFileMock.mockReturnValue(
-    Promise.resolve({} as BlobDownloadResponseParsed)
+    Promise.resolve({
+      _response: {
+        status: 200
+      }
+    } as BlobDownloadResponseParsed)
   )
 
   const fileSize = 142