Mirror of https://github.com/actions/toolkit

Refactor saveCacheV2 to use saveCache from cacheHttpClient
commit 62f5f1885b (parent eaf0083ee2)

(File paths below are inferred from each hunk's imports.)
packages/cache/__tests__/options.test.ts

@@ -47,14 +47,16 @@ test('getUploadOptions sets defaults', async () => {
   expect(actualOptions).toEqual({
     uploadConcurrency,
-    uploadChunkSize
+    uploadChunkSize,
+    useAzureSdk
   })
 })

 test('getUploadOptions overrides all settings', async () => {
   const expectedOptions: UploadOptions = {
     uploadConcurrency: 2,
-    uploadChunkSize: 16 * 1024 * 1024
+    uploadChunkSize: 16 * 1024 * 1024,
+    useAzureSdk: true
   }

   const actualOptions = getUploadOptions(expectedOptions)

packages/cache/__tests__/save.test.ts

@@ -270,7 +270,12 @@ test('save with server error should fail', async () => {
     compression
   )
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    cacheId,
+    archiveFile,
+    '',
+    undefined
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })

@@ -315,7 +320,12 @@ test('save with valid inputs uploads a cache', async () => {
     compression
   )
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    cacheId,
+    archiveFile,
+    '',
+    undefined
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
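Both assertions change for the same reason: saveCache now takes the signed upload URL as its third argument, and the v1 flow passes an empty string there. That is safe because resolving no options leaves the Azure route off. A quick check, using the same import path as the sibling test files:

import {getUploadOptions} from '../src/options'

// With no overrides, useAzureSdk defaults to false, so the legacy HTTP
// upload path runs and the empty signed URL is never dereferenced.
const resolved = getUploadOptions(undefined)
console.log(resolved.useAzureSdk) // false
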
packages/cache/__tests__/saveCacheV2.test.ts

@@ -6,15 +6,14 @@ import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import * as config from '../src/internal/config'
 import * as tar from '../src/internal/tar'
 import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
-import * as uploadCacheModule from '../src/internal/uploadUtils'
-import {BlobUploadCommonResponse} from '@azure/storage-blob'
-import {InvalidResponseError} from '../src/internal/shared/errors'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
+import {UploadOptions} from '../src/options'

 let logDebugMock: jest.SpyInstance

 jest.mock('../src/internal/tar')

-let uploadFileMock = jest.fn()
+const uploadFileMock = jest.fn()
 const blockBlobClientMock = jest.fn().mockImplementation(() => ({
   uploadFile: uploadFileMock
 }))

@@ -116,15 +115,7 @@ test('create cache entry failure', async () => {
     .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
     .mockReturnValueOnce(archiveFileSize)
   const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
-  const uploadCacheArchiveSDKMock = jest
-    .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK')
-    .mockReturnValueOnce(
-      Promise.resolve({
-        _response: {
-          status: 200
-        }
-      } as BlobUploadCommonResponse)
-    )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')

   const cacheId = await saveCache(paths, key)
   expect(cacheId).toBe(-1)

@@ -139,15 +130,15 @@ test('create cache entry failure', async () => {
   expect(createTarMock).toHaveBeenCalledTimes(1)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
   expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0)
-  expect(uploadCacheArchiveSDKMock).toHaveBeenCalledTimes(0)
+  expect(saveCacheMock).toHaveBeenCalledTimes(0)
 })

-test('finalize save cache failure', async () => {
+test('save cache fails if a signedUploadURL was not passed', async () => {
   const paths = 'node_modules'
   const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(paths)]
   const logWarningMock = jest.spyOn(core, 'warning')
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
+  const signedUploadURL = ''
+  const options: UploadOptions = {useAzureSdk: true}

   const createCacheEntryMock = jest
     .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')

@@ -156,16 +147,63 @@ test('finalize save cache failure', async () => {
     )

   const createTarMock = jest.spyOn(tar, 'createTar')
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')

-  const uploadCacheMock = jest.spyOn(uploadCacheModule, 'uploadCacheArchiveSDK')
-  uploadCacheMock.mockReturnValueOnce(
-    Promise.resolve({
-      _response: {
-        status: 200
-      }
-    } as BlobUploadCommonResponse)
-  )
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))

+  const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
+  const archiveFileSize = 1024
+  jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValueOnce(archiveFileSize)
+
+  const cacheId = await saveCache([paths], key, options)
+
+  expect(cacheId).toBe(-1)
+  expect(createCacheEntryMock).toHaveBeenCalledWith({
+    key,
+    version: cacheVersion
+  })
+
+  const archiveFolder = '/foo/bar'
+  const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
+  expect(createTarMock).toHaveBeenCalledWith(
+    archiveFolder,
+    cachePaths,
+    compression
+  )
+
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
+  expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('finalize save cache failure', async () => {
+  const paths = 'node_modules'
+  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+  const cachePaths = [path.resolve(paths)]
+  const logWarningMock = jest.spyOn(core, 'warning')
+  const signedUploadURL = 'https://blob-storage.local?signed=true'
+  const options: UploadOptions = {useAzureSdk: true}
+
+  const createCacheEntryMock = jest
+    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
+    .mockReturnValue(
+      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
+    )
+
+  const createTarMock = jest.spyOn(tar, 'createTar')
+  const saveCacheMock = jest
+    .spyOn(cacheHttpClient, 'saveCache')
+    .mockResolvedValue(Promise.resolve())
+
+  const compression = CompressionMethod.Zstd
+  const getCompressionMock = jest
+    .spyOn(cacheUtils, 'getCompressionMethod')

@@ -181,7 +219,7 @@ test('finalize save cache failure', async () => {
     .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
     .mockReturnValue(Promise.resolve({ok: false, entryId: ''}))

-  const cacheId = await saveCache([paths], key)
+  const cacheId = await saveCache([paths], key, options)

   expect(createCacheEntryMock).toHaveBeenCalledWith({
     key,

@@ -196,7 +234,12 @@ test('finalize save cache failure', async () => {
     compression
   )

-  expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)

   expect(finalizeCacheEntryMock).toHaveBeenCalledWith({

@@ -211,64 +254,13 @@ test('finalize save cache failure', async () => {
     )
 })

-test('save with uploadCache Server error will fail', async () => {
-  const paths = 'node_modules'
-  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
-    .mockReturnValue(
-      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
-    )
-
-  const archiveFileSize = 1024
-  jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValueOnce(archiveFileSize)
-  jest
-    .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK')
-    .mockRejectedValueOnce(new InvalidResponseError('boom'))
-
-  const cacheId = await saveCache([paths], key)
-  expect(cacheId).toBe(-1)
-})
-
-test('uploadFile returns 500', async () => {
-  const paths = 'node_modules'
-  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const signedUploadURL = 'https://blob-storage.local?signed=true'
-  const logWarningMock = jest.spyOn(core, 'warning')
-  jest
-    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
-    .mockReturnValue(
-      Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
-    )
-
-  const archiveFileSize = 1024
-  jest
-    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
-    .mockReturnValueOnce(archiveFileSize)
-  jest.spyOn(uploadCacheModule, 'uploadCacheArchiveSDK').mockRestore()
-
-  uploadFileMock = jest.fn().mockResolvedValueOnce({
-    _response: {
-      status: 500
-    }
-  })
-  const cacheId = await saveCache([paths], key)
-
-  expect(logWarningMock).toHaveBeenCalledWith(
-    'Failed to save: Upload failed with status code 500'
-  )
-  expect(cacheId).toBe(-1)
-})
-
 test('save with valid inputs uploads a cache', async () => {
   const paths = 'node_modules'
   const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(paths)]
   const signedUploadURL = 'https://blob-storage.local?signed=true'
   const createTarMock = jest.spyOn(tar, 'createTar')
+  const options: UploadOptions = {useAzureSdk: true}

   const archiveFileSize = 1024
   jest

@@ -282,15 +274,7 @@ test('save with valid inputs uploads a cache', async () => {
       Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
     )

-  const uploadCacheMock = jest
-    .spyOn(uploadCacheModule, 'uploadCacheArchiveSDK')
-    .mockReturnValueOnce(
-      Promise.resolve({
-        _response: {
-          status: 200
-        }
-      } as BlobUploadCommonResponse)
-    )
+  const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')

   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest

@@ -306,7 +290,12 @@ test('save with valid inputs uploads a cache', async () => {

   const archiveFolder = '/foo/bar'
   const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
-  expect(uploadCacheMock).toHaveBeenCalledWith(signedUploadURL, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(
+    -1,
+    archiveFile,
+    signedUploadURL,
+    options
+  )
   expect(createTarMock).toHaveBeenCalledWith(
     archiveFolder,
     cachePaths,
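Because the upload now funnels through cacheHttpClient.saveCache, these tests stub that one entry point instead of uploadUtils and the Blob SDK response shape. A minimal sketch of the pattern (test fragment, assuming the imports shown in the first hunk above):

import * as cacheHttpClient from '../src/internal/cacheHttpClient'

// saveCache resolves void, so a plain resolved mock is enough to drive
// the happy path without touching the network or Azure.
const saveCacheMock = jest
  .spyOn(cacheHttpClient, 'saveCache')
  .mockResolvedValue(undefined)

The diff's .mockResolvedValue(Promise.resolve()) appears to behave the same way, since awaiting flattens the nested promise; undefined simply states the resolved value directly.
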
packages/cache/src/cache.ts

@@ -13,7 +13,6 @@ import {
   GetCacheEntryDownloadURLRequest
 } from './generated/results/api/v1/cache'
 import {CacheFileSizeLimit} from './internal/constants'
-import {uploadCacheArchiveSDK} from './internal/uploadUtils'
 export class ValidationError extends Error {
   constructor(message: string) {
     super(message)

@@ -421,7 +420,7 @@ async function saveCacheV1(
     }

     core.debug(`Saving Cache (ID: ${cacheId})`)
-    await cacheHttpClient.saveCache(cacheId, archivePath, options)
+    await cacheHttpClient.saveCache(cacheId, archivePath, '', options)
   } catch (error) {
     const typedError = error as Error
     if (typedError.name === ValidationError.name) {

@@ -458,6 +457,11 @@ async function saveCacheV2(
   options?: UploadOptions,
   enableCrossOsArchive = false
 ): Promise<number> {
+  // Override UploadOptions to force the use of Azure
+  options = {
+    ...options,
+    useAzureSdk: true
+  }
   const compressionMethod = await utils.getCompressionMethod()
   const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
   let cacheId = -1

@@ -517,11 +521,12 @@ async function saveCacheV2(
     }

     core.debug(`Attempting to upload cache located at: ${archivePath}`)
-    const uploadResponse = await uploadCacheArchiveSDK(
+    await cacheHttpClient.saveCache(
+      cacheId,
+      archivePath,
       response.signedUploadUrl,
-      archivePath
+      options
     )
-    core.debug(`Download response status: ${uploadResponse._response.status}`)

     const finalizeRequest: FinalizeCacheEntryUploadRequest = {
       key,
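The functional core of this file's change is small: saveCacheV2 pins useAzureSdk before delegating, so callers cannot opt out of the SDK route on the v2 service. A tiny sketch of that merge; forceAzureSdk is a hypothetical name for the inline spread above:

import {UploadOptions} from './options'

// Hypothetical helper mirroring saveCacheV2's inline spread: caller
// fields survive, but useAzureSdk is always pinned on afterwards.
function forceAzureSdk(options?: UploadOptions): UploadOptions {
  return {...options, useAzureSdk: true}
}

forceAzureSdk({uploadConcurrency: 8, useAzureSdk: false})
// -> {uploadConcurrency: 8, useAzureSdk: true}

Spread order matters here: useAzureSdk comes after the spread, so it wins even when the caller explicitly set it to false.
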
packages/cache/src/internal/cacheHttpClient.ts

@@ -8,6 +8,7 @@ import {
 import * as fs from 'fs'
 import {URL} from 'url'
 import * as utils from './cacheUtils'
+import {uploadCacheArchiveSDK} from './uploadUtils'
 import {
   ArtifactCacheEntry,
   InternalCacheOptions,

@@ -326,26 +327,45 @@ async function commitCache(
 export async function saveCache(
   cacheId: number,
   archivePath: string,
+  signedUploadURL?: string,
   options?: UploadOptions
 ): Promise<void> {
-  const httpClient = createHttpClient()
+  const uploadOptions = getUploadOptions(options)

-  core.debug('Upload cache')
-  await uploadFile(httpClient, cacheId, archivePath, options)
+  if (uploadOptions.useAzureSdk) {
+    // Use Azure storage SDK to upload caches directly to Azure
+    if (!signedUploadURL) {
+      throw new Error(
+        'Azure Storage SDK can only be used when a signed URL is provided.'
+      )
+    }
+    await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
+  } else {
+    const httpClient = createHttpClient()

-  // Commit Cache
-  core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
-  core.info(
-    `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
-  )
+    core.debug('Upload cache')
+    await uploadFile(httpClient, cacheId, archivePath, options)

-  const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
-  if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
-    throw new Error(
-      `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+    // Commit Cache
+    core.debug('Commiting cache')
+    const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
+    core.info(
+      `Cache Size: ~${Math.round(
+        cacheSize / (1024 * 1024)
+      )} MB (${cacheSize} B)`
     )
-  }

-  core.info('Cache saved successfully')
+    const commitCacheResponse = await commitCache(
+      httpClient,
+      cacheId,
+      cacheSize
+    )
+    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+      throw new Error(
+        `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+      )
+    }
+
+    core.info('Cache saved successfully')
+  }
 }
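One entry point now serves both protocols: useAzureSdk plus a signed URL selects the Blob SDK route, anything else falls through to the legacy uploadFile-and-commit flow. A hedged sketch of the two call shapes, with placeholder values:

import * as cacheHttpClient from './cacheHttpClient'
import {UploadOptions} from '../options'

async function demo(): Promise<void> {
  // Placeholder values for illustration only.
  const cacheId = 42
  const archivePath = '/tmp/cache.tzst'
  const options: UploadOptions = {uploadConcurrency: 4}

  // v1 shape: no signed URL, so the HTTP upload + commitCache route runs.
  await cacheHttpClient.saveCache(cacheId, archivePath, '', options)

  // v2 shape: a signed URL and useAzureSdk route through the Azure Blob SDK.
  // An empty URL here would throw before any upload is attempted.
  const signedUploadURL = 'https://example.blob.core.windows.net/c?sig=...'
  await cacheHttpClient.saveCache(cacheId, archivePath, signedUploadURL, {
    ...options,
    useAzureSdk: true
  })
}
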
packages/cache/src/internal/uploadUtils.ts

@@ -6,16 +6,18 @@ import {
   BlockBlobParallelUploadOptions
 } from '@azure/storage-blob'
 import {InvalidResponseError} from './shared/errors'
+import {UploadOptions} from '../options'

 export async function uploadCacheArchiveSDK(
   signedUploadURL: string,
-  archivePath: string
+  archivePath: string,
+  options?: UploadOptions
 ): Promise<BlobUploadCommonResponse> {
   // Specify data transfer options
   const uploadOptions: BlockBlobParallelUploadOptions = {
-    blockSize: 4 * 1024 * 1024, // 4 MiB max block size
-    concurrency: 4, // maximum number of parallel transfer workers
-    maxSingleShotSize: 8 * 1024 * 1024 // 8 MiB initial transfer size
+    blockSize: options?.uploadChunkSize,
+    concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers
+    maxSingleShotSize: 128 * 1024 * 1024 // 128 MiB initial transfer size
   }

   const blobClient: BlobClient = new BlobClient(signedUploadURL)
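Transfer tuning now flows in from UploadOptions rather than hard-coded constants; only the single-shot threshold stays fixed, raised from 8 MiB to 128 MiB. A standalone sketch of the equivalent Azure SDK call (URL and path are placeholders; the real function also wraps failures in InvalidResponseError):

import {BlobClient} from '@azure/storage-blob'
import {getUploadOptions} from '../options'

async function uploadSketch(): Promise<void> {
  const signedUploadURL = 'https://example.blob.core.windows.net/c?sig=...' // placeholder
  const archivePath = '/tmp/cache.tzst' // placeholder
  const options = getUploadOptions({useAzureSdk: true})

  const blockBlobClient = new BlobClient(signedUploadURL).getBlockBlobClient()
  const response = await blockBlobClient.uploadFile(archivePath, {
    blockSize: options.uploadChunkSize, // 32 MiB default via getUploadOptions
    concurrency: options.uploadConcurrency, // 4 parallel workers by default
    maxSingleShotSize: 128 * 1024 * 1024 // one-request uploads up to 128 MiB
  })

  if (response._response.status >= 400) {
    throw new Error(`Upload failed with status code ${response._response.status}`)
  }
}

Note that the diff passes the raw caller options through, so options?.uploadChunkSize can be undefined when a caller skips getUploadOptions; in that case the SDK picks a block size itself.
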
packages/cache/src/options.ts

@@ -4,6 +4,14 @@ import * as core from '@actions/core'
  * Options to control cache upload
  */
 export interface UploadOptions {
+  /**
+   * Indicates whether to use the Azure Blob SDK to download caches
+   * that are stored on Azure Blob Storage to improve reliability and
+   * performance
+   *
+   * @default false
+   */
+  useAzureSdk?: boolean
   /**
    * Number of parallel cache upload
    *

@@ -77,11 +85,16 @@ export interface DownloadOptions {
  */
 export function getUploadOptions(copy?: UploadOptions): UploadOptions {
   const result: UploadOptions = {
+    useAzureSdk: false,
     uploadConcurrency: 4,
     uploadChunkSize: 32 * 1024 * 1024
   }

   if (copy) {
+    if (typeof copy.useAzureSdk === 'boolean') {
+      result.useAzureSdk = copy.useAzureSdk
+    }
+
     if (typeof copy.uploadConcurrency === 'number') {
       result.uploadConcurrency = copy.uploadConcurrency
     }

@@ -91,6 +104,7 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
     }
   }

+  core.debug(`Use Azure SDK: ${result.useAzureSdk}`)
   core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
   core.debug(`Upload chunk size: ${result.uploadChunkSize}`)
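Taken together: a bare call now reports the SDK flag alongside the existing tuning defaults, and overrides merge field by field. A quick usage sketch (the deep import path is an assumption about the package's compiled layout):

import {getUploadOptions} from '@actions/cache/lib/options'

// No overrides: every field falls back to its default.
const defaults = getUploadOptions()
// -> {useAzureSdk: false, uploadConcurrency: 4, uploadChunkSize: 33554432}

// Partial override: unspecified fields keep their defaults.
const custom = getUploadOptions({
  useAzureSdk: true,
  uploadChunkSize: 16 * 1024 * 1024
})
// -> {useAzureSdk: true, uploadConcurrency: 4, uploadChunkSize: 16777216}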