
Make caching more verbose

- Print the cache size when saving, just as it is already printed when restoring
- Print a restore success message, matching what is printed when saving
- Print the list of cached files when debug logging is enabled

See also: https://github.com/actions/cache/issues/471
pull/650/head
Yaroslav Dynnikov 2020-11-26 01:56:57 +03:00
parent c861dd8859
commit bfdba95ece
4 changed files with 112 additions and 1 deletion
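The gist of the change, as a minimal TypeScript sketch: the new listTar helper is only invoked when step debug logging is enabled, so regular runs stay as quiet as before. The wrapper name and import paths below are assumptions for illustration; core.isDebug, listTar, and CompressionMethod all appear in the diff.

import * as core from '@actions/core'
import {CompressionMethod} from './internal/constants'
import {listTar} from './internal/tar'

// Illustrative wrapper (not part of the commit): list the archive contents
// only when the workflow runs with debug logging enabled.
export async function maybeListArchive(
  archivePath: string,
  compressionMethod: CompressionMethod
): Promise<void> {
  if (core.isDebug()) {
    await listTar(archivePath, compressionMethod)
  }
}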

View File

@@ -186,3 +186,75 @@ test('gzip create tar', async () => {
    }
  )
})

test('zstd list tar', async () => {
  const execMock = jest.spyOn(exec, 'exec')
  const archivePath = IS_WINDOWS
    ? `${process.env['windir']}\\fakepath\\cache.tar`
    : 'cache.tar'
  const tarPath = 'tar'
  await tar.listTar(archivePath, CompressionMethod.Zstd)
  expect(execMock).toHaveBeenCalledTimes(1)
  expect(execMock).toHaveBeenCalledWith(
    `"${tarPath}"`,
    [
      '--use-compress-program',
      'zstd -d --long=30',
      '-tf',
      IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
      '-P'
    ].concat(IS_WINDOWS ? ['--force-local'] : []),
    {cwd: undefined}
  )
})

test('zstdWithoutLong list tar', async () => {
  const execMock = jest.spyOn(exec, 'exec')
  const archivePath = IS_WINDOWS
    ? `${process.env['windir']}\\fakepath\\cache.tar`
    : 'cache.tar'
  const tarPath = 'tar'
  await tar.listTar(archivePath, CompressionMethod.ZstdWithoutLong)
  expect(execMock).toHaveBeenCalledTimes(1)
  expect(execMock).toHaveBeenCalledWith(
    `"${tarPath}"`,
    [
      '--use-compress-program',
      'zstd -d',
      '-tf',
      IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
      '-P'
    ].concat(IS_WINDOWS ? ['--force-local'] : []),
    {cwd: undefined}
  )
})

test('gzip list tar', async () => {
  const execMock = jest.spyOn(exec, 'exec')
  const archivePath = IS_WINDOWS
    ? `${process.env['windir']}\\fakepath\\cache.tar`
    : 'cache.tar'
  await tar.listTar(archivePath, CompressionMethod.Gzip)
  const tarPath = IS_WINDOWS
    ? `${process.env['windir']}\\System32\\tar.exe`
    : 'tar'
  expect(execMock).toHaveBeenCalledTimes(1)
  expect(execMock).toHaveBeenCalledWith(
    `"${tarPath}"`,
    [
      '-z',
      '-tf',
      IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
      '-P'
    ],
    {cwd: undefined}
  )
})
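A note on the Windows-specific expectations in the tests above, with an illustrative sketch (the constant and helper here are not part of the diff): the archive path is normalized to forward slashes before being passed to tar, and GNU tar additionally receives --force-local so the drive-letter colon in a Windows path is not parsed as a remote host.

// Illustrative only: mirrors what the assertions above expect on Windows.
const IS_WINDOWS = process.platform === 'win32'

function toTarFriendlyPath(archivePath: string): string {
  // Backslashes become forward slashes, exactly as the tests assert.
  return IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath
}

// Appended on Windows so GNU tar treats a path like "C:\..." as a local file.
const extraArgs: string[] = IS_WINDOWS ? ['--force-local'] : []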

View File

@@ -2,7 +2,7 @@ import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar, listTar} from './internal/tar'
import {DownloadOptions, UploadOptions} from './options'

export class ValidationError extends Error {
@@ -100,6 +100,10 @@ export async function restoreCache(
      options
    )

    if (core.isDebug()) {
      await listTar(archivePath, compressionMethod)
    }

    const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
    core.info(
      `Cache Size: ~${Math.round(
@@ -108,6 +112,7 @@ export async function restoreCache(
    )

    await extractTar(archivePath, compressionMethod)
    core.info('Cache restored successfully')
  } finally {
    // Try to delete the archive to save space
    try {
@@ -162,6 +167,9 @@ export async function saveCache(
  core.debug(`Archive Path: ${archivePath}`)
  await createTar(archiveFolder, cachePaths, compressionMethod)

  if (core.isDebug()) {
    await listTar(archivePath, compressionMethod)
  }

  const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)

View File

@@ -301,6 +301,10 @@ export async function saveCache(
  // Commit Cache
  core.debug('Commiting cache')
  const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
  core.info(
    `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
  )

  const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
  if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
    throw new Error(
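A quick worked example of the size line added above (the archive size is made up for illustration):

// 52428800 bytes / (1024 * 1024) = 50, so the new log line reads:
//   Cache Size: ~50 MB (52428800 B)
const exampleSize = 52428800
const exampleMessage = `Cache Size: ~${Math.round(
  exampleSize / (1024 * 1024)
)} MB (${exampleSize} B)`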

View File

@@ -113,3 +113,30 @@ export async function createTar(
  ]
  await execTar(args, compressionMethod, archiveFolder)
}

export async function listTar(
  archivePath: string,
  compressionMethod: CompressionMethod
): Promise<void> {
  // --d: Decompress.
  // --long=#: Enables long distance matching with # bits.
  // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
  // Using 30 here because we also support 32-bit self-hosted runners.
  function getCompressionProgram(): string[] {
    switch (compressionMethod) {
      case CompressionMethod.Zstd:
        return ['--use-compress-program', 'zstd -d --long=30']
      case CompressionMethod.ZstdWithoutLong:
        return ['--use-compress-program', 'zstd -d']
      default:
        return ['-z']
    }
  }

  const args = [
    ...getCompressionProgram(),
    '-tf',
    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
    '-P'
  ]
  await execTar(args, compressionMethod)
}
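A hedged usage sketch of the new function (the archive path and compression method are assumptions): for a zstd archive, the call below effectively runs tar --use-compress-program "zstd -d --long=30" -tf <archive> -P, which is exactly what the tests at the top of this commit assert.

import {CompressionMethod} from './internal/constants'
import {listTar} from './internal/tar'

// Example only; in the commit this is called from restoreCache/saveCache
// behind the core.isDebug() check shown above.
async function printArchiveContents(archivePath: string): Promise<void> {
  await listTar(archivePath, CompressionMethod.Zstd)
}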