mirror of https://github.com/actions/toolkit
Make caching more verbose
- Print cache size when saving cache, similarly to restoring
- Print restore success, similarly to saving
- Print cached file list if debug logging is enabled

See also: https://github.com/actions/cache/issues/471
parent c861dd8859
commit bfdba95ece
@@ -186,3 +186,75 @@ test('gzip create tar', async () => {
     }
   )
 })
+
+test('zstd list tar', async () => {
+  const execMock = jest.spyOn(exec, 'exec')
+
+  const archivePath = IS_WINDOWS
+    ? `${process.env['windir']}\\fakepath\\cache.tar`
+    : 'cache.tar'
+  const tarPath = 'tar'
+
+  await tar.listTar(archivePath, CompressionMethod.Zstd)
+
+  expect(execMock).toHaveBeenCalledTimes(1)
+  expect(execMock).toHaveBeenCalledWith(
+    `"${tarPath}"`,
+    [
+      '--use-compress-program',
+      'zstd -d --long=30',
+      '-tf',
+      IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
+      '-P'
+    ].concat(IS_WINDOWS ? ['--force-local'] : []),
+    {cwd: undefined}
+  )
+})
+
+test('zstdWithoutLong list tar', async () => {
+  const execMock = jest.spyOn(exec, 'exec')
+
+  const archivePath = IS_WINDOWS
+    ? `${process.env['windir']}\\fakepath\\cache.tar`
+    : 'cache.tar'
+  const tarPath = 'tar'
+
+  await tar.listTar(archivePath, CompressionMethod.ZstdWithoutLong)
+
+  expect(execMock).toHaveBeenCalledTimes(1)
+  expect(execMock).toHaveBeenCalledWith(
+    `"${tarPath}"`,
+    [
+      '--use-compress-program',
+      'zstd -d',
+      '-tf',
+      IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
+      '-P'
+    ].concat(IS_WINDOWS ? ['--force-local'] : []),
+    {cwd: undefined}
+  )
+})
+
+test('gzip list tar', async () => {
+  const execMock = jest.spyOn(exec, 'exec')
+  const archivePath = IS_WINDOWS
+    ? `${process.env['windir']}\\fakepath\\cache.tar`
+    : 'cache.tar'
+
+  await tar.listTar(archivePath, CompressionMethod.Gzip)
+
+  const tarPath = IS_WINDOWS
+    ? `${process.env['windir']}\\System32\\tar.exe`
+    : 'tar'
+  expect(execMock).toHaveBeenCalledTimes(1)
+  expect(execMock).toHaveBeenCalledWith(
+    `"${tarPath}"`,
+    [
+      '-z',
+      '-tf',
+      IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
+      '-P'
+    ],
+    {cwd: undefined}
+  )
+})
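For orientation, the zstd assertions above boil down to the following direct invocation through @actions/exec. This is a minimal sketch of the expected command, not the package's internal execTar helper; the quoted "tar" program name and the argument list come straight from the test expectations (per the tests, Windows runners additionally append --force-local).

// Sketch only: what the mocked exec call in 'zstd list tar' expands to.
import {exec} from '@actions/exec'

async function listZstdArchive(archivePath: string): Promise<void> {
  // `zstd -d --long=30` decompresses with long-distance matching enabled,
  // matching how the cache was compressed; `-t` lists, `-f` names the
  // archive, `-P` keeps absolute paths.
  await exec('"tar"', [
    '--use-compress-program',
    'zstd -d --long=30',
    '-tf',
    archivePath,
    '-P'
  ])
}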
@@ -2,7 +2,7 @@ import * as core from '@actions/core'
 import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
-import {createTar, extractTar} from './internal/tar'
+import {createTar, extractTar, listTar} from './internal/tar'
 import {DownloadOptions, UploadOptions} from './options'
 
 export class ValidationError extends Error {
@@ -100,6 +100,10 @@ export async function restoreCache(
       options
     )
 
+    if (core.isDebug()) {
+      await listTar(archivePath, compressionMethod)
+    }
+
     const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
     core.info(
       `Cache Size: ~${Math.round(
@@ -108,6 +112,7 @@ export async function restoreCache(
     )
 
     await extractTar(archivePath, compressionMethod)
+    core.info('Cache restored successfully')
   } finally {
     // Try to delete the archive to save space
     try {
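From the caller's side nothing changes: the extra file listing only appears when the runner has debug logging enabled (core.isDebug() reads RUNNER_DEBUG=1, which the runner sets when the ACTIONS_STEP_DEBUG secret is true). A minimal consumer sketch, with hypothetical paths and key:

import * as cache from '@actions/cache'

async function restore(): Promise<void> {
  // On a hit, restoreCache now logs the cache size, the archive's file list
  // (debug logging only), and 'Cache restored successfully'.
  const hitKey = await cache.restoreCache(['node_modules'], 'npm-deps-v1')
  if (!hitKey) {
    console.log('Cache miss')
  }
}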
@@ -162,6 +167,9 @@ export async function saveCache(
   core.debug(`Archive Path: ${archivePath}`)
 
   await createTar(archiveFolder, cachePaths, compressionMethod)
+  if (core.isDebug()) {
+    await listTar(archivePath, compressionMethod)
+  }
 
   const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
   const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
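The save path mirrors this: under debug logging the freshly created archive is listed right after createTar, before upload. A corresponding sketch with the same hypothetical inputs:

import * as cache from '@actions/cache'

async function save(): Promise<void> {
  // saveCache resolves to the numeric cache id on success.
  const cacheId = await cache.saveCache(['node_modules'], 'npm-deps-v1')
  console.log(`Saved cache ${cacheId}`)
}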
@@ -301,6 +301,10 @@ export async function saveCache(
   // Commit Cache
   core.debug('Commiting cache')
   const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  core.info(
+    `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
+  )
+
   const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
   if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
     throw new Error(
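The new size line uses plain binary-megabyte rounding. As a worked example, via a hypothetical helper that mirrors the format string above (not part of the package):

// Hypothetical helper mirroring the log format used when saving.
function formatCacheSize(bytes: number): string {
  return `Cache Size: ~${Math.round(bytes / (1024 * 1024))} MB (${bytes} B)`
}

// 157286400 / (1024 * 1024) = 150, so:
// formatCacheSize(157286400) === 'Cache Size: ~150 MB (157286400 B)'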
@@ -113,3 +113,30 @@ export async function createTar(
   ]
   await execTar(args, compressionMethod, archiveFolder)
 }
+
+export async function listTar(
+  archivePath: string,
+  compressionMethod: CompressionMethod
+): Promise<void> {
+  // --d: Decompress.
+  // --long=#: Enables long distance matching with # bits.
+  // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+  // Using 30 here because we also support 32-bit self-hosted runners.
+  function getCompressionProgram(): string[] {
+    switch (compressionMethod) {
+      case CompressionMethod.Zstd:
+        return ['--use-compress-program', 'zstd -d --long=30']
+      case CompressionMethod.ZstdWithoutLong:
+        return ['--use-compress-program', 'zstd -d']
+      default:
+        return ['-z']
+    }
+  }
+  const args = [
+    ...getCompressionProgram(),
+    '-tf',
+    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+    '-P'
+  ]
+  await execTar(args, compressionMethod)
+}
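Note the path normalization: the archive path is rewritten with forward slashes before it reaches tar, matching what the Windows branches of the tests above expect (per those tests, --force-local is also appended on Windows so GNU tar does not treat the drive-letter colon as a remote host). A small illustration with a made-up Windows path:

import * as path from 'path'

// On Windows path.sep is '\\', so `\\${path.sep}` builds the pattern /\\/g,
// which matches literal backslashes; on Linux the pattern is /\//g and the
// replacement is effectively a no-op.
const archivePath = 'C:\\fakepath\\cache.tar'
const normalized = archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
// On a Windows runner: normalized === 'C:/fakepath/cache.tar'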