mirror of https://github.com/actions/toolkit
Reconfigure catch block
parent d31c2dd88d
commit 7a532d03f4

@@ -174,7 +174,7 @@ test('restore with zstd as default but gzip compressed cache found on windows',
   const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
   getCacheMock
     .mockImplementationOnce(async () => {
-      throw new Error('Cache not found.')
+      return Promise.resolve(null)
     })
    .mockImplementationOnce(async () => {
      return Promise.resolve(cacheEntry)
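
The first mock no longer rejects with "Cache not found."; it resolves to null, matching the reworked lookup contract. A minimal sketch of the chained-mock pattern, assuming a Jest test environment and a stand-in client object rather than the real cacheHttpClient:

// Sketch only: a stand-in client with the same method name as cacheHttpClient.
type Entry = {cacheKey: string; archiveLocation: string}
const client = {
  getCacheEntry: async (_keys: string[]): Promise<Entry | null> => null
}

// Each mockImplementationOnce answers exactly one call, in order:
// first lookup (zstd) resolves to null (miss), second lookup (gzip) resolves to a hit.
const getCacheMock = jest
  .spyOn(client, 'getCacheEntry')
  .mockImplementationOnce(async () => null)
  .mockImplementationOnce(async () => ({
    cacheKey: 'node-cache',
    archiveLocation: 'https://example.com/archive'
  }))

// A test can then assert that the fallback lookup actually happened:
// expect(getCacheMock).toHaveBeenCalledTimes(2)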

@@ -91,18 +91,13 @@ export async function restoreCache(
   let compressionMethod = await utils.getCompressionMethod()
   let archivePath = ''
   try {
-    try {
-      console.log('before first get cache entry')
     // path are needed to compute version
     cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
       compressionMethod
     })
-      console.log('after first get cache entry')
-      console.log(cacheEntry)
-    } catch (error) {
+    if (!cacheEntry?.archiveLocation) {
       // This is to support the old cache entry created
       // by the old version of the cache action on windows.
-      console.log('in first catch block')
       if (
         process.platform === 'win32' &&
         compressionMethod !== CompressionMethod.Gzip
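
With the inner try/catch and the debug logging gone, a miss is no longer signalled by a thrown error: getCacheEntry resolves to an entry (or null) and the caller checks for a usable archiveLocation. A small sketch of that guard, using a simplified entry shape (the field names come from the diff, the interface itself is illustrative):

// Optional chaining short-circuits to undefined when the entry is null,
// so both "no entry" and "entry without an archive URL" count as a miss.
interface ArtifactCacheEntry {
  cacheKey?: string
  archiveLocation?: string
}

function isMiss(entry: ArtifactCacheEntry | null): boolean {
  return !entry?.archiveLocation
}

console.log(isMiss(null)) // true
console.log(isMiss({cacheKey: 'node-cache'})) // true
console.log(isMiss({cacheKey: 'node-cache', archiveLocation: 'https://example.com/a'})) // false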

@@ -114,19 +109,15 @@ export async function restoreCache(
         cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
           compressionMethod
         })
-        console.log(cacheEntry)
         if (!cacheEntry?.archiveLocation) {
-          throw error
+          return undefined
         }
       } else {
-        throw error
-      }
-    }
-    if (!cacheEntry?.archiveLocation) {
         // Cache not found
         return undefined
       }
+    }

     archivePath = path.join(
       await utils.createTempDirectory(),
       utils.getCacheFileName(compressionMethod)
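
Together with the previous hunk, this replaces the old throw-and-catch flow with an explicit miss check plus a one-shot gzip retry for caches written by older action versions on Windows. A sketch of the resulting control flow, with stand-in types and a stand-in client in place of the real @actions/cache internals:

// Reconstructed lookup flow; the client, entry shape and compression values
// are stand-ins, only the branching mirrors the hunks above.
type CacheEntry = {cacheKey?: string; archiveLocation?: string}
type Compression = 'gzip' | 'zstd'

interface LookupClient {
  getCacheEntry(
    keys: string[],
    paths: string[],
    opts: {compressionMethod: Compression}
  ): Promise<CacheEntry | null>
}

async function lookupWithGzipFallback(
  client: LookupClient,
  keys: string[],
  paths: string[],
  preferred: Compression
): Promise<CacheEntry | undefined> {
  let compressionMethod = preferred
  let entry = await client.getCacheEntry(keys, paths, {compressionMethod})
  if (!entry?.archiveLocation) {
    // Older cache versions on Windows always wrote gzip archives,
    // so retry the lookup once with gzip before giving up.
    if (process.platform === 'win32' && compressionMethod !== 'gzip') {
      compressionMethod = 'gzip'
      entry = await client.getCacheEntry(keys, paths, {compressionMethod})
      if (!entry?.archiveLocation) {
        return undefined
      }
    } else {
      // Cache not found
      return undefined
    }
  }
  return entry
}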

@@ -156,8 +147,6 @@ export async function restoreCache(

     return cacheEntry.cacheKey
   } catch (error) {
-    console.log('In second catch block')
-    console.log(error)
     const typedError = error as Error
     if (typedError.name === ValidationError.name) {
       throw error
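
With the debug logging removed, the outer catch keeps only the type-based triage: narrow the unknown error and re-throw validation failures. The hunk does not show what follows the if; in the sketch below the non-validation path (warn and continue) is this sketch's own choice, not something taken from the diff:

// `error` is `unknown` in a catch clause under strict TypeScript settings,
// so it is narrowed to Error before its name is compared.
class ValidationError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'ValidationError'
  }
}

function handleRestoreError(error: unknown): void {
  const typedError = error as Error
  if (typedError.name === ValidationError.name) {
    // Caller mistakes (bad keys or paths) should fail the step loudly.
    throw error
  }
  // In this sketch anything else is reported and swallowed, so a failed
  // lookup does not fail the workflow.
  console.warn(`Cache lookup failed: ${typedError.message}`)
}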

@@ -193,8 +182,7 @@ export async function saveCache(
   checkPaths(paths)
   checkKey(key)

-  // const compressionMethod = await utils.getCompressionMethod()
-  const compressionMethod = CompressionMethod.Gzip
+  const compressionMethod = await utils.getCompressionMethod()
   let cacheId = -1

   const cachePaths = await utils.resolvePaths(paths)
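
saveCache stops hard-coding gzip and asks utils.getCompressionMethod() again. The helper's body is not part of this diff; the following is a hypothetical sketch of how such a probe could prefer zstd when the binary is available and fall back to gzip otherwise (the real implementation may differ):

// Hypothetical compression probe; not the actual utils.getCompressionMethod().
import {exec} from 'child_process'
import {promisify} from 'util'

const execAsync = promisify(exec)

type Compression = 'gzip' | 'zstd'

async function getCompressionMethod(): Promise<Compression> {
  try {
    // If a zstd binary is on PATH, prefer it; otherwise fall back to gzip.
    await execAsync('zstd --version')
    return 'zstd'
  } catch {
    return 'gzip'
  }
}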

@@ -127,6 +127,8 @@ async function getArgs(
   type: string,
   archivePath = ''
 ): Promise<string> {
+  let args: string
+
   const tarPath = await getTarPath()
   const tarArgs = await getTarArgs(
     tarPath,
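
Declaring let args: string without an initializer is safe here because TypeScript's definite-assignment analysis sees that every branch below assigns it before return args, as the next hunk shows. A tiny illustration of that rule (names are illustrative only):

// Compiles without error: both branches assign `args` before it is read.
function pick(flag: boolean): string {
  let args: string
  if (flag) {
    args = 'first'
  } else {
    args = 'second'
  }
  return args
}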

@@ -142,11 +144,18 @@ async function getArgs(
     tarPath.type === ArchiveToolType.BSD &&
     compressionMethod !== CompressionMethod.Gzip &&
     IS_WINDOWS

   if (BSD_TAR_ZSTD && type !== 'create') {
-    return [...compressionArgs, ...tarArgs].join(' ')
+    args = [...compressionArgs, ...tarArgs].join(' ')
   } else {
-    return [...tarArgs, ...compressionArgs].join(' ')
+    args = [...tarArgs, ...compressionArgs].join(' ')
   }
+
+  if (BSD_TAR_ZSTD) {
+    args = ['cmd /c "', args, '"'].join(' ')
+  }
+
+  return args
 }

 function getWorkingDirectory(): string {
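
Combining the last two hunks, getArgs now builds the command string into args and, when BSD tar is paired with zstd on Windows, wraps the finished command in cmd /c "..." before returning. A reconstruction of the resulting shape with a simplified signature: the enums, constants and argument arrays are stand-ins, and only the branching and the wrapping come from the diff:

// Reconstructed shape of getArgs; the real function computes tarArgs and
// compressionArgs through async helpers not shown in these hunks.
const IS_WINDOWS = process.platform === 'win32'

enum ArchiveToolType {
  GNU = 'gnu',
  BSD = 'bsd'
}

enum CompressionMethod {
  Gzip = 'gzip',
  Zstd = 'zstd'
}

function getArgs(
  compressionMethod: CompressionMethod,
  type: string,
  tarPathType: ArchiveToolType,
  tarArgs: string[],
  compressionArgs: string[]
): string {
  let args: string

  const BSD_TAR_ZSTD =
    tarPathType === ArchiveToolType.BSD &&
    compressionMethod !== CompressionMethod.Gzip &&
    IS_WINDOWS

  // For extract/list with BSD tar + zstd on Windows the (de)compression
  // command is placed before the tar command; otherwise after it.
  if (BSD_TAR_ZSTD && type !== 'create') {
    args = [...compressionArgs, ...tarArgs].join(' ')
  } else {
    args = [...tarArgs, ...compressionArgs].join(' ')
  }

  // Wrap the whole command line for cmd.exe when using BSD tar with zstd
  // on Windows, as the hunk above does.
  if (BSD_TAR_ZSTD) {
    args = ['cmd /c "', args, '"'].join(' ')
  }

  return args
}

The single exit point is what makes the wrapping possible: with the old early returns there was no place to post-process the assembled command.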