mirror of https://github.com/actions/toolkit
Merge pull request #1249 from actions/phantsure/gzip-fallback
Add fallback to gzip compression if cache not found
commit cb3dc49fec
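The change in this commit: on Windows, when the cache entry lookup with the default compression method (zstd) throws, restoreCache now retries the lookup with gzip, so entries written by older gzip-based versions of the cache action can still be restored. The retry happens inside restoreCache, so callers keep using the same API. Below is a minimal usage sketch of that unchanged public surface; the paths and key are the example values used in the test added here, and the surrounding run() wrapper is illustrative only.

import * as cache from '@actions/cache'

async function run(): Promise<void> {
  // Example values taken from the test added in this PR.
  const paths = ['node_modules']
  const key = 'node-test'

  // If the zstd lookup misses on Windows because the entry was saved with
  // gzip by an older version of the cache action, restoreCache retries with
  // gzip internally; the caller only sees the restored key or undefined.
  const restoredKey = await cache.restoreCache(paths, key)
  if (restoredKey === undefined) {
    console.log('Cache not found')
  } else {
    console.log(`Cache restored from key: ${restoredKey}`)
  }
}

run().catch(err => {
  console.error(err)
})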
@@ -161,6 +161,81 @@ test('restore with gzip compressed cache found', async () => {
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
+test('restore with zstd as default but gzip compressed cache found on windows', async () => {
+  if (process.platform === 'win32') {
+    const paths = ['node_modules']
+    const key = 'node-test'
+
+    const cacheEntry: ArtifactCacheEntry = {
+      cacheKey: key,
+      scope: 'refs/heads/main',
+      archiveLocation: 'www.actionscache.test/download'
+    }
+    const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+    getCacheMock
+      .mockImplementationOnce(async () => {
+        throw new Error('Cache not found.')
+      })
+      .mockImplementationOnce(async () => {
+        return Promise.resolve(cacheEntry)
+      })
+
+    const tempPath = '/foo/bar'
+
+    const createTempDirectoryMock = jest.spyOn(
+      cacheUtils,
+      'createTempDirectory'
+    )
+    createTempDirectoryMock.mockImplementation(async () => {
+      return Promise.resolve(tempPath)
+    })
+
+    const archivePath = path.join(tempPath, CacheFilename.Gzip)
+    const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+
+    const fileSize = 142
+    const getArchiveFileSizeInBytesMock = jest
+      .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+      .mockReturnValue(fileSize)
+
+    const extractTarMock = jest.spyOn(tar, 'extractTar')
+    const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+
+    const compression = CompressionMethod.Zstd
+    const getCompressionMock = jest
+      .spyOn(cacheUtils, 'getCompressionMethod')
+      .mockReturnValue(Promise.resolve(compression))
+
+    const cacheKey = await restoreCache(paths, key)
+
+    expect(cacheKey).toBe(key)
+    expect(getCacheMock).toHaveBeenNthCalledWith(1, [key], paths, {
+      compressionMethod: compression
+    })
+    expect(getCacheMock).toHaveBeenNthCalledWith(2, [key], paths, {
+      compressionMethod: CompressionMethod.Gzip
+    })
+    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+    expect(downloadCacheMock).toHaveBeenCalledWith(
+      cacheEntry.archiveLocation,
+      archivePath,
+      undefined
+    )
+    expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+
+    expect(extractTarMock).toHaveBeenCalledTimes(1)
+    expect(extractTarMock).toHaveBeenCalledWith(
+      archivePath,
+      CompressionMethod.Gzip
+    )
+
+    expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+    expect(getCompressionMock).toHaveBeenCalledTimes(1)
+  }
+})
+
 test('restore with zstd compressed cache found', async () => {
   const paths = ['node_modules']
   const key = 'node-test'
@@ -4,6 +4,8 @@ import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar, listTar} from './internal/tar'
 import {DownloadOptions, UploadOptions} from './options'
+import {CompressionMethod} from './internal/constants'
+import {ArtifactCacheEntry} from './internal/contracts'
 
 export class ValidationError extends Error {
   constructor(message: string) {
@@ -85,19 +87,38 @@ export async function restoreCache(
     checkKey(key)
   }
 
-  const compressionMethod = await utils.getCompressionMethod()
+  let cacheEntry: ArtifactCacheEntry | null
+  let compressionMethod = await utils.getCompressionMethod()
   let archivePath = ''
   try {
-    // path are needed to compute version
-    const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
-      compressionMethod
-    })
+    try {
+      // path are needed to compute version
+      cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+        compressionMethod
+      })
+    } catch (error) {
+      // This is to support the old cache entry created
+      // by the old version of the cache action on windows.
+      if (
+        process.platform === 'win32' &&
+        compressionMethod !== CompressionMethod.Gzip
+      ) {
+        compressionMethod = CompressionMethod.Gzip
+        cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+          compressionMethod
+        })
+        if (!cacheEntry?.archiveLocation) {
+          throw error
+        }
+      } else {
+        throw error
+      }
+    }
+
     if (!cacheEntry?.archiveLocation) {
       // Cache not found
       return undefined
     }
 
     archivePath = path.join(
       await utils.createTempDirectory(),
       utils.getCacheFileName(compressionMethod)
     )