1
0
Fork 0

Merge pull request #1260 from actions/phantsure/cache-testing

Fix known issues for cache
pull/1277/head
Sampark Sharma 2022-12-12 18:09:19 +05:30 committed by GitHub
commit 8b695c1f30
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 214 additions and 64 deletions

View File

@ -0,0 +1,91 @@
# Unit-test workflow for the cache package on Windows using BSD tar
# (GNU tar is deleted from the runner so the system BSD tar is exercised).
name: cache-windows-bsd-unit-tests
on:
  push:
    branches:
      - main
    paths-ignore:
      - '**.md'
  pull_request:
    paths-ignore:
      - '**.md'
jobs:
  build:
    name: Build
    runs-on: windows-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      # Remove GNU tar so the workflow falls back to the Windows system BSD tar
      - shell: bash
        run: |
          rm "C:\Program Files\Git\usr\bin\tar.exe"
      - name: Set Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
      # node context. This runs a local action that gets and sets the necessary env variables that are needed
      - name: Set env variables
        uses: ./packages/cache/__tests__/__fixtures__/
      # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
      # without these to just compile the cache package
      - name: Install root npm packages
        run: npm ci
      - name: Compile cache package
        run: |
          npm ci
          npm run tsc
        working-directory: packages/cache
      - name: Generate files in working directory
        shell: bash
        run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
      - name: Generate files outside working directory
        shell: bash
        run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
      # We're using node -e to call the functions directly available in the @actions/cache package
      - name: Save cache using saveCache()
        run: |
          node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
      - name: Delete cache folders before restoring
        shell: bash
        run: |
          rm -rf test-cache
          rm -rf ~/test-cache
      - name: Restore cache using restoreCache() with http-client
        run: |
          node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))"
      - name: Verify cache restored with http-client
        shell: bash
        run: |
          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
      - name: Delete cache folders before restoring
        shell: bash
        run: |
          rm -rf test-cache
          rm -rf ~/test-cache
          rm -f cache.tar
      - name: Restore cache using restoreCache() with Azure SDK
        run: |
          node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
      - name: Verify cache restored with Azure SDK
        shell: bash
        run: |
          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache

View File

@ -174,7 +174,7 @@ test('restore with zstd as default but gzip compressed cache found on windows',
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock getCacheMock
.mockImplementationOnce(async () => { .mockImplementationOnce(async () => {
throw new Error('Cache not found.') return Promise.resolve(null)
}) })
.mockImplementationOnce(async () => { .mockImplementationOnce(async () => {
return Promise.resolve(cacheEntry) return Promise.resolve(cacheEntry)

View File

@ -73,7 +73,9 @@ test('zstd extract tar', async () => {
'--use-compress-program', '--use-compress-program',
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
]) ])
.join(' ') .join(' '),
undefined,
{cwd: undefined}
) )
}) })
@ -92,20 +94,31 @@ test('zstd extract tar with windows BSDtar', async () => {
await tar.extractTar(archivePath, CompressionMethod.Zstd) await tar.extractTar(archivePath, CompressionMethod.Zstd)
expect(mkdirMock).toHaveBeenCalledWith(workspace) expect(mkdirMock).toHaveBeenCalledWith(workspace)
expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledTimes(2)
expect(execMock).toHaveBeenCalledWith(
expect(execMock).toHaveBeenNthCalledWith(
1,
[ [
'zstd -d --long=30 -o', 'zstd -d --long=30 -o',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
'&&', ].join(' '),
undefined,
{cwd: undefined}
)
expect(execMock).toHaveBeenNthCalledWith(
2,
[
`"${tarPath}"`, `"${tarPath}"`,
'-xf', '-xf',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P', '-P',
'-C', '-C',
workspace?.replace(/\\/g, '/') workspace?.replace(/\\/g, '/')
].join(' ') ].join(' '),
undefined,
{cwd: undefined}
) )
} }
}) })
@ -135,7 +148,9 @@ test('gzip extract tar', async () => {
.concat(IS_WINDOWS ? ['--force-local'] : []) .concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []) .concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['-z']) .concat(['-z'])
.join(' ') .join(' '),
undefined,
{cwd: undefined}
) )
}) })
@ -162,7 +177,9 @@ test('gzip extract GNU tar on windows with GNUtar in path', async () => {
workspace?.replace(/\\/g, '/'), workspace?.replace(/\\/g, '/'),
'--force-local', '--force-local',
'-z' '-z'
].join(' ') ].join(' '),
undefined,
{cwd: undefined}
) )
} }
}) })
@ -230,8 +247,10 @@ test('zstd create tar with windows BSDtar', async () => {
const tarPath = SystemTarPathOnWindows const tarPath = SystemTarPathOnWindows
expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledTimes(2)
expect(execMock).toHaveBeenCalledWith(
expect(execMock).toHaveBeenNthCalledWith(
1,
[ [
`"${tarPath}"`, `"${tarPath}"`,
'--posix', '--posix',
@ -243,8 +262,17 @@ test('zstd create tar with windows BSDtar', async () => {
'-C', '-C',
workspace?.replace(/\\/g, '/'), workspace?.replace(/\\/g, '/'),
'--files-from', '--files-from',
ManifestFilename, ManifestFilename
'&&', ].join(' '),
undefined, // args
{
cwd: archiveFolder
}
)
expect(execMock).toHaveBeenNthCalledWith(
2,
[
'zstd -T0 --long=30 -o', 'zstd -T0 --long=30 -o',
CacheFilename.Zstd.replace(/\\/g, '/'), CacheFilename.Zstd.replace(/\\/g, '/'),
TarFilename.replace(/\\/g, '/') TarFilename.replace(/\\/g, '/')
@ -320,7 +348,9 @@ test('zstd list tar', async () => {
'--use-compress-program', '--use-compress-program',
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
]) ])
.join(' ') .join(' '),
undefined,
{cwd: undefined}
) )
}) })
@ -335,18 +365,29 @@ test('zstd list tar with windows BSDtar', async () => {
await tar.listTar(archivePath, CompressionMethod.Zstd) await tar.listTar(archivePath, CompressionMethod.Zstd)
const tarPath = SystemTarPathOnWindows const tarPath = SystemTarPathOnWindows
expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledTimes(2)
expect(execMock).toHaveBeenCalledWith(
expect(execMock).toHaveBeenNthCalledWith(
1,
[ [
'zstd -d --long=30 -o', 'zstd -d --long=30 -o',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
'&&', ].join(' '),
undefined,
{cwd: undefined}
)
expect(execMock).toHaveBeenNthCalledWith(
2,
[
`"${tarPath}"`, `"${tarPath}"`,
'-tf', '-tf',
TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P' '-P'
].join(' ') ].join(' '),
undefined,
{cwd: undefined}
) )
} }
}) })
@ -372,7 +413,9 @@ test('zstdWithoutLong list tar', async () => {
.concat(IS_WINDOWS ? ['--force-local'] : []) .concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []) .concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']) .concat(['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'])
.join(' ') .join(' '),
undefined,
{cwd: undefined}
) )
}) })
@ -396,6 +439,8 @@ test('gzip list tar', async () => {
.concat(IS_WINDOWS ? ['--force-local'] : []) .concat(IS_WINDOWS ? ['--force-local'] : [])
.concat(IS_MAC ? ['--delay-directory-restore'] : []) .concat(IS_MAC ? ['--delay-directory-restore'] : [])
.concat(['-z']) .concat(['-z'])
.join(' ') .join(' '),
undefined,
{cwd: undefined}
) )
}) })

View File

@ -91,14 +91,12 @@ export async function restoreCache(
let compressionMethod = await utils.getCompressionMethod() let compressionMethod = await utils.getCompressionMethod()
let archivePath = '' let archivePath = ''
try { try {
try { // path are needed to compute version
// path are needed to compute version cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod
compressionMethod })
}) if (!cacheEntry?.archiveLocation) {
} catch (error) { // This is to support the old cache entry created by gzip on windows.
// This is to support the old cache entry created
// by the old version of the cache action on windows.
if ( if (
process.platform === 'win32' && process.platform === 'win32' &&
compressionMethod !== CompressionMethod.Gzip compressionMethod !== CompressionMethod.Gzip
@ -108,17 +106,18 @@ export async function restoreCache(
compressionMethod compressionMethod
}) })
if (!cacheEntry?.archiveLocation) { if (!cacheEntry?.archiveLocation) {
throw error return undefined
} }
core.debug(
"Couldn't find cache entry with zstd compression, falling back to gzip compression."
)
} else { } else {
throw error // Cache not found
return undefined
} }
} }
if (!cacheEntry?.archiveLocation) {
// Cache not found
return undefined
}
archivePath = path.join( archivePath = path.join(
await utils.createTempDirectory(), await utils.createTempDirectory(),
utils.getCacheFileName(compressionMethod) utils.getCacheFileName(compressionMethod)

View File

@ -104,6 +104,7 @@ export async function getCacheEntry(
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource)) httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
) )
if (response.statusCode === 204) { if (response.statusCode === 204) {
// Cache not found
return null return null
} }
if (!isSuccessStatusCode(response.statusCode)) { if (!isSuccessStatusCode(response.statusCode)) {
@ -113,6 +114,7 @@ export async function getCacheEntry(
const cacheResult = response.result const cacheResult = response.result
const cacheDownloadUrl = cacheResult?.archiveLocation const cacheDownloadUrl = cacheResult?.archiveLocation
if (!cacheDownloadUrl) { if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.') throw new Error('Cache not found.')
} }
core.setSecret(cacheDownloadUrl) core.setSecret(cacheDownloadUrl)

View File

@ -14,7 +14,7 @@ import {
const IS_WINDOWS = process.platform === 'win32' const IS_WINDOWS = process.platform === 'win32'
// Function also mutates the args array. For non-mutation call with passing an empty array. // Returns tar path and type: BSD or GNU
async function getTarPath(): Promise<ArchiveTool> { async function getTarPath(): Promise<ArchiveTool> {
switch (process.platform) { switch (process.platform) {
case 'win32': { case 'win32': {
@ -43,6 +43,7 @@ async function getTarPath(): Promise<ArchiveTool> {
default: default:
break break
} }
// Default assumption is GNU tar is present in path
return <ArchiveTool>{ return <ArchiveTool>{
path: await io.which('tar', true), path: await io.which('tar', true),
type: ArchiveToolType.GNU type: ArchiveToolType.GNU
@ -60,6 +61,7 @@ async function getTarArgs(
const cacheFileName = utils.getCacheFileName(compressionMethod) const cacheFileName = utils.getCacheFileName(compressionMethod)
const tarFile = 'cache.tar' const tarFile = 'cache.tar'
const workingDirectory = getWorkingDirectory() const workingDirectory = getWorkingDirectory()
// Specific args for the BSD tar on Windows workaround
const BSD_TAR_ZSTD = const BSD_TAR_ZSTD =
tarPath.type === ArchiveToolType.BSD && tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip && compressionMethod !== CompressionMethod.Gzip &&
@ -122,11 +124,14 @@ async function getTarArgs(
return args return args
} }
async function getArgs( // Returns commands to run tar and compression program
async function getCommands(
compressionMethod: CompressionMethod, compressionMethod: CompressionMethod,
type: string, type: string,
archivePath = '' archivePath = ''
): Promise<string> { ): Promise<string[]> {
let args
const tarPath = await getTarPath() const tarPath = await getTarPath()
const tarArgs = await getTarArgs( const tarArgs = await getTarArgs(
tarPath, tarPath,
@ -142,11 +147,18 @@ async function getArgs(
tarPath.type === ArchiveToolType.BSD && tarPath.type === ArchiveToolType.BSD &&
compressionMethod !== CompressionMethod.Gzip && compressionMethod !== CompressionMethod.Gzip &&
IS_WINDOWS IS_WINDOWS
if (BSD_TAR_ZSTD && type !== 'create') { if (BSD_TAR_ZSTD && type !== 'create') {
return [...compressionArgs, ...tarArgs].join(' ') args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]
} else { } else {
return [...tarArgs, ...compressionArgs].join(' ') args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]
} }
if (BSD_TAR_ZSTD) {
return args
}
return [args.join(' ')]
} }
function getWorkingDirectory(): string { function getWorkingDirectory(): string {
@ -173,8 +185,7 @@ async function getDecompressionProgram(
? [ ? [
'zstd -d --long=30 -o', 'zstd -d --long=30 -o',
TarFilename, TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
'&&'
] ]
: [ : [
'--use-compress-program', '--use-compress-program',
@ -185,8 +196,7 @@ async function getDecompressionProgram(
? [ ? [
'zstd -d -o', 'zstd -d -o',
TarFilename, TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
'&&'
] ]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']
default: default:
@ -194,6 +204,7 @@ async function getDecompressionProgram(
} }
} }
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0' // zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@ -212,7 +223,6 @@ async function getCompressionProgram(
case CompressionMethod.Zstd: case CompressionMethod.Zstd:
return BSD_TAR_ZSTD return BSD_TAR_ZSTD
? [ ? [
'&&',
'zstd -T0 --long=30 -o', 'zstd -T0 --long=30 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
TarFilename TarFilename
@ -224,7 +234,6 @@ async function getCompressionProgram(
case CompressionMethod.ZstdWithoutLong: case CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD return BSD_TAR_ZSTD
? [ ? [
'&&',
'zstd -T0 -o', 'zstd -T0 -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
TarFilename TarFilename
@ -235,18 +244,29 @@ async function getCompressionProgram(
} }
} }
// Executes all commands as separate processes
async function execCommands(commands: string[], cwd?: string): Promise<void> {
for (const command of commands) {
try {
await exec(command, undefined, {cwd})
} catch (error) {
throw new Error(
`${command.split(' ')[0]} failed with error: ${error?.message}`
)
}
}
}
// List the contents of a tar
export async function listTar( export async function listTar(
archivePath: string, archivePath: string,
compressionMethod: CompressionMethod compressionMethod: CompressionMethod
): Promise<void> { ): Promise<void> {
const args = await getArgs(compressionMethod, 'list', archivePath) const commands = await getCommands(compressionMethod, 'list', archivePath)
try { await execCommands(commands)
await exec(args)
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`)
}
} }
// Extract a tar
export async function extractTar( export async function extractTar(
archivePath: string, archivePath: string,
compressionMethod: CompressionMethod compressionMethod: CompressionMethod
@ -254,14 +274,11 @@ export async function extractTar(
// Create directory to extract tar into // Create directory to extract tar into
const workingDirectory = getWorkingDirectory() const workingDirectory = getWorkingDirectory()
await io.mkdirP(workingDirectory) await io.mkdirP(workingDirectory)
const args = await getArgs(compressionMethod, 'extract', archivePath) const commands = await getCommands(compressionMethod, 'extract', archivePath)
try { await execCommands(commands)
await exec(args)
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`)
}
} }
// Create a tar
export async function createTar( export async function createTar(
archiveFolder: string, archiveFolder: string,
sourceDirectories: string[], sourceDirectories: string[],
@ -272,10 +289,6 @@ export async function createTar(
path.join(archiveFolder, ManifestFilename), path.join(archiveFolder, ManifestFilename),
sourceDirectories.join('\n') sourceDirectories.join('\n')
) )
const args = await getArgs(compressionMethod, 'create') const commands = await getCommands(compressionMethod, 'create')
try { await execCommands(commands, archiveFolder)
await exec(args, undefined, {cwd: archiveFolder})
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`)
}
} }