mirror of https://github.com/actions/toolkit
Initial changes
parent 9b7bcb1567
commit e051715283
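This commit makes the cache operations non-throwing: `restoreCache` now resolves to `undefined` and `saveCache` resolves to `-1` when an error occurs, reporting the failure through `core.warning` instead of rejecting. A minimal consumer-side sketch of the new contract (the cache key scheme below is made up for illustration, not part of the commit):

```ts
import * as core from '@actions/core'
import {restoreCache, saveCache} from '@actions/cache'

async function run(): Promise<void> {
  const paths = ['node_modules']
  // Hypothetical key scheme, only for illustration
  const key = `node-${process.env.GITHUB_SHA ?? 'local'}`

  // With this change, a cache miss and an internal error look the same to the
  // caller: both yield undefined, and errors only surface as workflow warnings.
  const restoredKey = await restoreCache(paths, key)
  if (restoredKey === undefined) {
    core.info('No cache restored, continuing without it')
  }

  // saveCache now resolves to -1 instead of throwing when the save fails
  // (for example when the archive exceeds the size limit).
  const cacheId = await saveCache(paths, key)
  if (cacheId === -1) {
    core.info('Cache was not saved')
  }
}

run()
```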
packages/cache/__tests__/restoreCache.test.ts

@@ -73,14 +73,16 @@ test('restore with no cache found', async () => {
 test('restore with server error should fail', async () => {
   const paths = ['node_modules']
   const key = 'node-test'
+  const logWarningMock = jest.spyOn(core, 'warning')

   jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => {
     throw new Error('HTTP Error Occurred')
   })

-  await expect(restoreCache(paths, key)).rejects.toThrowError(
-    'HTTP Error Occurred'
-  )
+  const cacheKey = await restoreCache(paths, key)
+  expect(cacheKey).toBe(undefined)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Failed to restore: Error: HTTP Error Occurred')
 })

 test('restore with restore keys and no cache found', async () => {
packages/cache/__tests__/saveCache.test.ts

@@ -48,6 +48,7 @@ test('save with large cache outputs should fail', async () => {
   const cachePaths = [path.resolve(filePath)]

   const createTarMock = jest.spyOn(tar, 'createTar')
+  const logWarningMock = jest.spyOn(core, 'warning')

   const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
   jest
@@ -57,10 +58,11 @@ test('save with large cache outputs should fail', async () => {
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))

-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.'
-  )
+  const cacheId = await saveCache([filePath], primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Failed to save: Error: Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.')

   const archiveFolder = '/foo/bar'
@@ -79,6 +81,7 @@ test('save with large cache outputs should fail in GHES with error message', async () => {
   const cachePaths = [path.resolve(filePath)]

   const createTarMock = jest.spyOn(tar, 'createTar')
+  const logWarningMock = jest.spyOn(core, 'warning')

   const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
   jest
@@ -105,10 +108,11 @@ test('save with large cache outputs should fail in GHES with error message', async () => {
       }
       return response
     })

-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'The cache filesize must be between 0 and 1073741824 bytes'
-  )
+  const cacheId = await saveCache([filePath], primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Failed to save: Error: The cache filesize must be between 0 and 1073741824 bytes')

   const archiveFolder = '/foo/bar'
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
@@ -127,6 +131,7 @@ test('save with large cache outputs should fail in GHES without error message', async () => {
   const cachePaths = [path.resolve(filePath)]

   const createTarMock = jest.spyOn(tar, 'createTar')
+  const logWarningMock = jest.spyOn(core, 'warning')

   const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
   jest
@@ -150,9 +155,10 @@ test('save with large cache outputs should fail in GHES without error message', async () => {
       return response
     })

-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'Cache size of ~11264 MB (11811160064 B) is over the data cap limit, not saving cache.'
-  )
+  const cacheId = await saveCache([filePath], primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Failed to save: Error: Cache size of ~11264 MB (11811160064 B) is over the data cap limit, not saving cache.')

   const archiveFolder = '/foo/bar'
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
@@ -168,6 +174,7 @@ test('save with large cache outputs should fail in GHES without error message', async () => {
 test('save with reserve cache failure should fail', async () => {
   const paths = ['node_modules']
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+  const logWarningMock = jest.spyOn(core, 'warning')

   const reserveCacheMock = jest
     .spyOn(cacheHttpClient, 'reserveCache')
@@ -187,9 +194,11 @@ test('save with reserve cache failure should fail', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))

-  await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
-    `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
-  )
+  const cacheId = await saveCache(paths, primaryKey)
+  expect(cacheId).toBe(-1)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith(`Failed to save: ReserveCacheError: Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`)

   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
   expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, {
     compressionMethod: compression
@@ -203,7 +212,7 @@ test('save with server error should fail', async () => {
   const filePath = 'node_modules'
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
   const cachePaths = [path.resolve(filePath)]
+  const logWarningMock = jest.spyOn(core, 'warning')
   const cacheId = 4
   const reserveCacheMock = jest
     .spyOn(cacheHttpClient, 'reserveCache')
@@ -227,10 +236,11 @@ test('save with server error should fail', async () => {
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValueOnce(Promise.resolve(compression))

-  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
-    'HTTP Error Occurred'
-  )
+  await saveCache([filePath], primaryKey)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith('Failed to save: Error: HTTP Error Occurred')

   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
   expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
     compressionMethod: compression
packages/cache/src/cache.ts

@@ -4,6 +4,7 @@ import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar, listTar} from './internal/tar'
 import {DownloadOptions, UploadOptions} from './options'
+import {ArtifactCacheEntry} from './internal/contracts'

 export class ValidationError extends Error {
   constructor(message: string) {
@@ -86,23 +87,24 @@ export async function restoreCache(
   }

   const compressionMethod = await utils.getCompressionMethod()
-
-  // path are needed to compute version
-  const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
-    compressionMethod
-  })
-  if (!cacheEntry?.archiveLocation) {
-    // Cache not found
-    return undefined
-  }
-
-  const archivePath = path.join(
-    await utils.createTempDirectory(),
-    utils.getCacheFileName(compressionMethod)
-  )
-  core.debug(`Archive Path: ${archivePath}`)
-
+  let archivePath = ''
   try {
+    // paths are needed to compute version
+    const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+      compressionMethod
+    })
+
+    if (!cacheEntry?.archiveLocation) {
+      // Cache not found
+      return undefined
+    }
+
+    archivePath = path.join(
+      await utils.createTempDirectory(),
+      utils.getCacheFileName(compressionMethod)
+    )
+    core.debug(`Archive Path: ${archivePath}`)
+
     // Download the cache from the cache entry
     await cacheHttpClient.downloadCache(
       cacheEntry.archiveLocation,
@@ -123,7 +125,12 @@ export async function restoreCache(

     await extractTar(archivePath, compressionMethod)
     core.info('Cache restored successfully')
-  } finally {
+
+    return cacheEntry.cacheKey
+  } catch (error) {
+    // Suppress all cache related errors because caching should be optional
+    core.warning(`Failed to restore: ${error}`)
+  } finally {
     // Try to delete the archive to save space
     try {
       await utils.unlinkFile(archivePath)
@@ -132,7 +139,7 @@ export async function restoreCache(
     }
   }

-  return cacheEntry.cacheKey
+  return undefined
 }

 /**
@@ -152,7 +159,7 @@ export async function saveCache(
   checkKey(key)

   const compressionMethod = await utils.getCompressionMethod()
-  let cacheId = null
+  let cacheId = -1

   const cachePaths = await utils.resolvePaths(paths)
   core.debug('Cache Paths:')
@@ -217,6 +224,8 @@ export async function saveCache(

     core.debug(`Saving Cache (ID: ${cacheId})`)
     await cacheHttpClient.saveCache(cacheId, archivePath, options)
+  } catch (error) {
+    core.warning(`Failed to save: ${error}`)
   } finally {
     // Try to delete the archive to save space
     try {
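Both functions now share the same shape: do the work inside `try`, downgrade any error to a `core.warning`, and let the `finally` cleanup run before returning a sentinel value (`undefined` for restore, `-1` for save). The helper below is not part of the commit; it is only a hedged sketch of the pattern the two call sites have in common:

```ts
import * as core from '@actions/core'

// Hypothetical helper, not part of this commit: run an optional cache step,
// report any error as a workflow warning, and fall back to a sentinel value.
async function withSuppressedErrors<T>(
  label: string,
  sentinel: T,
  fn: () => Promise<T>
): Promise<T> {
  try {
    return await fn()
  } catch (error) {
    // Caching is best effort, so the error is logged but never rethrown.
    core.warning(`Failed to ${label}: ${error}`)
    return sentinel
  }
}

// Usage mirroring the diff (doRestore/doSave stand in for the real work):
// const cacheKey = await withSuppressedErrors<string | undefined>('restore', undefined, doRestore)
// const cacheId = await withSuppressedErrors<number>('save', -1, doSave)
```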