mirror of https://github.com/actions/toolkit
Initial changes
parent 9b7bcb1567
commit e051715283
@@ -73,14 +73,16 @@ test('restore with no cache found', async () => {
 test('restore with server error should fail', async () => {
   const paths = ['node_modules']
   const key = 'node-test'
+  const logWarningMock = jest.spyOn(core, "warning");
 
   jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => {
     throw new Error('HTTP Error Occurred')
   })
 
-  await expect(restoreCache(paths, key)).rejects.toThrowError(
-    'HTTP Error Occurred'
-  )
+  const cacheKey = await restoreCache(paths, key)
+  expect(cacheKey).toBe(undefined)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Fail to restore: Error: HTTP Error Occurred')
 })
 
 test('restore with restore keys and no cache found', async () => {
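Taken together, the new assertions describe the changed contract: on a backend error, restoreCache now resolves to undefined and emits a core.warning instead of rejecting. A minimal consumer sketch under that assumption (the run wiring and the key are illustrative, not part of this commit):

```ts
import * as cache from '@actions/cache'
import * as core from '@actions/core'

// Illustrative action entry point: with this change, a backend error during
// restore behaves like a cache miss. restoreCache resolves to undefined,
// the warning is already on the job log, and the job keeps running.
async function run(): Promise<void> {
  const paths = ['node_modules']
  const key = 'node-test' // hypothetical key, mirrors the test fixture
  const restoredKey = await cache.restoreCache(paths, key)
  core.setOutput('cache-hit', restoredKey === key)
}

run()
```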
@@ -48,6 +48,7 @@ test('save with large cache outputs should fail', async () => {
   const cachePaths = [path.resolve(filePath)]
 
   const createTarMock = jest.spyOn(tar, 'createTar')
+  const logWarningMock = jest.spyOn(core, "warning");
 
   const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
   jest
@@ -58,9 +59,10 @@ test('save with large cache outputs should fail', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))
 
-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.'
-  )
+  const cacheId = await saveCache([filePath], primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Fail to save: Error: Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.')
 
   const archiveFolder = '/foo/bar'
@@ -79,6 +81,7 @@ test('save with large cache outputs should fail in GHES with error message', asy
   const cachePaths = [path.resolve(filePath)]
 
   const createTarMock = jest.spyOn(tar, 'createTar')
+  const logWarningMock = jest.spyOn(core, "warning");
 
   const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
   jest
@@ -106,9 +109,10 @@ test('save with large cache outputs should fail in GHES with error message', asy
       return response
     })
 
-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'The cache filesize must be between 0 and 1073741824 bytes'
-  )
+  const cacheId = await saveCache([filePath], primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Fail to save: Error: The cache filesize must be between 0 and 1073741824 bytes')
 
   const archiveFolder = '/foo/bar'
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
@@ -127,6 +131,7 @@ test('save with large cache outputs should fail in GHES without error message',
   const cachePaths = [path.resolve(filePath)]
 
   const createTarMock = jest.spyOn(tar, 'createTar')
+  const logWarningMock = jest.spyOn(core, "warning");
 
   const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
   jest
@@ -150,9 +155,10 @@ test('save with large cache outputs should fail in GHES without error message',
       return response
     })
 
-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'Cache size of ~11264 MB (11811160064 B) is over the data cap limit, not saving cache.'
-  )
+  const cacheId = await saveCache([filePath], primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Fail to save: Error: Cache size of ~11264 MB (11811160064 B) is over the data cap limit, not saving cache.')
 
   const archiveFolder = '/foo/bar'
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
@@ -168,6 +174,7 @@ test('save with large cache outputs should fail in GHES without error message',
 test('save with reserve cache failure should fail', async () => {
   const paths = ['node_modules']
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+  const logWarningMock = jest.spyOn(core, "warning");
 
   const reserveCacheMock = jest
     .spyOn(cacheHttpClient, 'reserveCache')
@@ -187,9 +194,11 @@ test('save with reserve cache failure should fail', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))
 
-  await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
-    `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
-  )
+  const cacheId = await saveCache(paths, primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith(`Fail to save: ReserveCacheError: Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`)
 
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
   expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, {
     compressionMethod: compression
@@ -203,7 +212,7 @@ test('save with server error should fail', async () => {
   const filePath = 'node_modules'
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(filePath)]
-
+  const logWarningMock = jest.spyOn(core, "warning");
   const cacheId = 4
   const reserveCacheMock = jest
     .spyOn(cacheHttpClient, 'reserveCache')
@@ -228,9 +237,10 @@ test('save with server error should fail', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))
 
-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'HTTP Error Occurred'
-  )
+  await saveCache([filePath], primaryKey)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Fail to save: Error: HTTP Error Occurred')
 
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
   expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
     compressionMethod: compression
@@ -4,6 +4,7 @@ import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar, listTar} from './internal/tar'
 import {DownloadOptions, UploadOptions} from './options'
+import { ArtifactCacheEntry } from './internal/contracts'
 
 export class ValidationError extends Error {
   constructor(message: string) {
@@ -86,23 +87,24 @@ export async function restoreCache(
   }
 
   const compressionMethod = await utils.getCompressionMethod()
 
-  // path are needed to compute version
-  const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
-    compressionMethod
-  })
-  if (!cacheEntry?.archiveLocation) {
-    // Cache not found
-    return undefined
-  }
-
-  const archivePath = path.join(
-    await utils.createTempDirectory(),
-    utils.getCacheFileName(compressionMethod)
-  )
-  core.debug(`Archive Path: ${archivePath}`)
-
+  let archivePath = ""
   try {
+    // path are needed to compute version
+    const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+      compressionMethod
+    })
+
+    if (!cacheEntry?.archiveLocation) {
+      // Cache not found
+      return undefined
+    }
+
+    archivePath = path.join(
+      await utils.createTempDirectory(),
+      utils.getCacheFileName(compressionMethod)
+    )
+    core.debug(`Archive Path: ${archivePath}`)
+
     // Download the cache from the cache entry
     await cacheHttpClient.downloadCache(
       cacheEntry.archiveLocation,
@@ -123,7 +125,12 @@ export async function restoreCache(
     await extractTar(archivePath, compressionMethod)
     core.info('Cache restored successfully')
-  } finally {
+
+    return cacheEntry.cacheKey
+  } catch(error) {
+    // Suppress all cache related errors because caching should be optional
+    core.warning(`Fail to restore: ${error}`);
+  }finally {
     // Try to delete the archive to save space
     try {
       await utils.unlinkFile(archivePath)
@@ -132,7 +139,7 @@ export async function restoreCache(
     }
   }
 
-  return cacheEntry.cacheKey
+  return undefined
 }
 
 /**
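Read together, the three restoreCache hunks give the function roughly the following post-change shape. This is a reconstruction from the diff for readability, not the verbatim file: the restoreKeys and options parameters, size logging, and debug listing are elided, and the imports are abbreviated from the module header.

```ts
import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {extractTar} from './internal/tar'

// Approximate post-change control flow of restoreCache, pieced together from
// the hunks above. Helper calls are taken from the diff; anything not shown
// there (such as the full parameter list) is elided.
export async function restoreCache(
  paths: string[],
  primaryKey: string
): Promise<string | undefined> {
  const compressionMethod = await utils.getCompressionMethod()
  let archivePath = ''
  try {
    const cacheEntry = await cacheHttpClient.getCacheEntry([primaryKey], paths, {
      compressionMethod
    })
    if (!cacheEntry?.archiveLocation) {
      return undefined // cache miss, same result as before
    }
    archivePath = path.join(
      await utils.createTempDirectory(),
      utils.getCacheFileName(compressionMethod)
    )
    await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)
    await extractTar(archivePath, compressionMethod)
    return cacheEntry.cacheKey // success now returns from inside the try
  } catch (error) {
    // any failure is downgraded to a warning; caching stays optional
    core.warning(`Fail to restore: ${error}`)
  } finally {
    // best-effort cleanup of the downloaded archive
    try {
      await utils.unlinkFile(archivePath)
    } catch (error) {
      core.debug(`Failed to delete archive: ${error}`)
    }
  }
  return undefined // reached only on the error path
}
```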
@@ -152,7 +159,7 @@ export async function saveCache(
   checkKey(key)
 
   const compressionMethod = await utils.getCompressionMethod()
-  let cacheId = null
+  let cacheId = -1
 
   const cachePaths = await utils.resolvePaths(paths)
   core.debug('Cache Paths:')
@@ -217,6 +224,8 @@ export async function saveCache(
 
     core.debug(`Saving Cache (ID: ${cacheId})`)
     await cacheHttpClient.saveCache(cacheId, archivePath, options)
+  } catch(error) {
+    core.warning(`Fail to save: ${error}`)
   } finally {
     // Try to delete the archive to save space
     try {
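The save path follows the same pattern: reservation and upload failures are caught, reported via core.warning, and saveCache resolves to the -1 sentinel initialised above. A hypothetical post step relying on that behaviour (the function name and key are illustrative, taken from the test fixture):

```ts
import * as cache from '@actions/cache'
import * as core from '@actions/core'

// Illustrative post step: after this change a failed reservation or upload no
// longer rejects; saveCache resolves to -1 and the warning is already logged.
async function post(): Promise<void> {
  const paths = ['node_modules']
  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
  const cacheId = await cache.saveCache(paths, key)
  if (cacheId === -1) {
    core.info('Cache was not saved, continuing without failing the job')
  }
}

post()
```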