diff --git a/.github/workflows/cache-windows-test.yml b/.github/workflows/cache-windows-test.yml new file mode 100644 index 00000000..3868f296 --- /dev/null +++ b/.github/workflows/cache-windows-test.yml @@ -0,0 +1,90 @@ +name: cache-windows-bsd-unit-tests +on: + push: + branches: + - main + paths-ignore: + - '**.md' + pull_request: + paths-ignore: + - '**.md' + +jobs: + build: + name: Build + + runs-on: windows-latest + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - shell: bash + run: | + rm "C:\Program Files\Git\usr\bin\tar.exe" + + - name: Set Node.js 12.x + uses: actions/setup-node@v1 + with: + node-version: 12.x + + # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the + # node context. This runs a local action that gets and sets the necessary env variables that are needed + - name: Set env variables + uses: ./packages/cache/__tests__/__fixtures__/ + + # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible + # without these to just compile the cache package + - name: Install root npm packages + run: npm ci + + - name: Compile cache package + run: | + npm ci + npm run tsc + working-directory: packages/cache + + - name: Generate files in working directory + shell: bash + run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache + + - name: Generate files outside working directory + shell: bash + run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache + + # We're using node -e to call the functions directly available in the @actions/cache package + - name: Save cache using saveCache() + run: | + node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))" + + - name: Delete cache folders before restoring + shell: bash + run: | + rm -rf test-cache + rm -rf ~/test-cache + + - name: Restore cache using restoreCache() with http-client + run: | + node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))" + + - name: Verify cache restored with http-client + shell: bash + run: | + packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache + packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache + + - name: Delete cache folders before restoring + shell: bash + run: | + rm -rf test-cache + rm -rf ~/test-cache + + - name: Restore cache using restoreCache() with Azure SDK + run: | + node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))" + + - name: Verify cache restored with Azure SDK + shell: bash + run: | + packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache + packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 73518e1c..e5ea9d49 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -91,3 +91,24 @@ ### 3.0.6 - Added `@azure/abort-controller` to dependencies to fix compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208) + +### 3.1.0-beta.1 +- Update actions/cache on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. 
([issue](https://github.com/actions/cache/issues/984)) + +### 3.1.0-beta.2 +- Added support for fallback to gzip to restore old caches on windows. + +### 3.1.0-beta.3 +- Bug Fixes for fallback to gzip to restore old caches on windows and bsdtar if gnutar is not available. + +### 3.1.0 +- Update actions/cache on windows to use gnu tar and zstd by default +- Update actions/cache on windows to fallback to bsdtar and zstd if gnu tar is not available. +- Added support for fallback to gzip to restore old caches on windows. + +### 3.1.1 +- Reverted changes in 3.1.0 to fix issue with symlink restoration on windows. +- Added support for verbose logging about cache version during cache miss. + +### 3.1.2 +- Fix issue with symlink restoration on windows. \ No newline at end of file diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts index 90dd5cfc..a0164d93 100644 --- a/packages/cache/__tests__/cacheHttpClient.test.ts +++ b/packages/cache/__tests__/cacheHttpClient.test.ts @@ -7,7 +7,7 @@ jest.mock('../src/internal/downloadUtils') test('getCacheVersion with one path returns version', async () => { const paths = ['node_modules'] - const result = getCacheVersion(paths) + const result = getCacheVersion(paths, undefined, true) expect(result).toEqual( 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985' ) @@ -15,7 +15,7 @@ test('getCacheVersion with one path returns version', async () => { test('getCacheVersion with multiple paths returns version', async () => { const paths = ['node_modules', 'dist'] - const result = getCacheVersion(paths) + const result = getCacheVersion(paths, undefined, true) expect(result).toEqual( '165c3053bc646bf0d4fac17b1f5731caca6fe38e0e464715c0c3c6b6318bf436' ) @@ -23,22 +23,33 @@ test('getCacheVersion with multiple paths returns version', async () => { test('getCacheVersion with zstd compression returns version', async () => { const paths = ['node_modules'] - const result = getCacheVersion(paths, CompressionMethod.Zstd) + const result = getCacheVersion(paths, CompressionMethod.Zstd, true) expect(result).toEqual( '273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24' ) }) -test('getCacheVersion with gzip compression does not change vesion', async () => { +test('getCacheVersion with gzip compression returns version', async () => { const paths = ['node_modules'] - const result = getCacheVersion(paths, CompressionMethod.Gzip) + const result = getCacheVersion(paths, CompressionMethod.Gzip, true) expect(result).toEqual( - 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985' + '470e252814dbffc9524891b17cf4e5749b26c1b5026e63dd3f00972db2393117' ) }) +test('getCacheVersion with enableCrossOsArchive as false returns version on windows', async () => { + if (process.platform === 'win32') { + const paths = ['node_modules'] + const result = getCacheVersion(paths) + + expect(result).toEqual( + '2db19d6596dc34f51f0043120148827a264863f5c6ac857569c2af7119bad14e' + ) + } +}) + test('downloadCache uses http-client for non-Azure URLs', async () => { const downloadCacheHttpClientMock = jest.spyOn( downloadUtils, diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts index 36ec8801..5318a007 100644 --- a/packages/cache/__tests__/restoreCache.test.ts +++ b/packages/cache/__tests__/restoreCache.test.ts @@ -142,7 +142,8 @@ test('restore with gzip compressed cache found', async () => { expect(cacheKey).toBe(key) 
expect(getCacheMock).toHaveBeenCalledWith([key], paths, { - compressionMethod: compression + compressionMethod: compression, + enableCrossOsArchive: false }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) expect(downloadCacheMock).toHaveBeenCalledWith( @@ -201,7 +202,8 @@ test('restore with zstd compressed cache found', async () => { expect(cacheKey).toBe(key) expect(getCacheMock).toHaveBeenCalledWith([key], paths, { - compressionMethod: compression + compressionMethod: compression, + enableCrossOsArchive: false }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) expect(downloadCacheMock).toHaveBeenCalledWith( @@ -258,7 +260,8 @@ test('restore with cache found for restore key', async () => { expect(cacheKey).toBe(restoreKey) expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { - compressionMethod: compression + compressionMethod: compression, + enableCrossOsArchive: false }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) expect(downloadCacheMock).toHaveBeenCalledWith( diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 945b254f..4d0027be 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -209,7 +209,9 @@ test('save with reserve cache failure should fail', async () => { expect(reserveCacheMock).toHaveBeenCalledTimes(1) expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, { - compressionMethod: compression + cacheSize: undefined, + compressionMethod: compression, + enableCrossOsArchive: false }) expect(createTarMock).toHaveBeenCalledTimes(1) expect(saveCacheMock).toHaveBeenCalledTimes(0) @@ -253,7 +255,9 @@ test('save with server error should fail', async () => { expect(reserveCacheMock).toHaveBeenCalledTimes(1) expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], { - compressionMethod: compression + cacheSize: undefined, + compressionMethod: compression, + enableCrossOsArchive: false }) const archiveFolder = '/foo/bar' const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) @@ -296,7 +300,9 @@ test('save with valid inputs uploads a cache', async () => { expect(reserveCacheMock).toHaveBeenCalledTimes(1) expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], { - compressionMethod: compression + cacheSize: undefined, + compressionMethod: compression, + enableCrossOsArchive: false }) const archiveFolder = '/foo/bar' const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) diff --git a/packages/cache/__tests__/tar.test.ts b/packages/cache/__tests__/tar.test.ts index e4233bc9..7ae77601 100644 --- a/packages/cache/__tests__/tar.test.ts +++ b/packages/cache/__tests__/tar.test.ts @@ -1,19 +1,29 @@ import * as exec from '@actions/exec' +import {exportVariable} from '@actions/core' import * as io from '@actions/io' import * as path from 'path' -import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import { + CacheFilename, + CompressionMethod, + GnuTarPathOnWindows, + ManifestFilename, + SystemTarPathOnWindows, + TarFilename +} from '../src/internal/constants' import * as tar from '../src/internal/tar' import * as utils from '../src/internal/cacheUtils' // eslint-disable-next-line @typescript-eslint/no-require-imports import fs = require('fs') +exportVariable('MSYS', 'winsymlinks:nativestrict') + jest.mock('@actions/exec') jest.mock('@actions/io') const IS_WINDOWS = process.platform === 'win32' const IS_MAC = process.platform === 'darwin' -const defaultTarPath = 
process.platform === 'darwin' ? 'gtar' : 'tar' +const defaultTarPath = IS_MAC ? 'gtar' : 'tar' function getTempDir(): string { return path.join(__dirname, '_temp', 'tar') @@ -28,6 +38,10 @@ beforeAll(async () => { await jest.requireActual('@actions/io').rmRF(getTempDir()) }) +beforeEach(async () => { + jest.restoreAllMocks() +}) + afterAll(async () => { delete process.env['GITHUB_WORKSPACE'] await jest.requireActual('@actions/io').rmRF(getTempDir()) @@ -41,16 +55,15 @@ test('zstd extract tar', async () => { ? `${process.env['windir']}\\fakepath\\cache.tar` : 'cache.tar' const workspace = process.env['GITHUB_WORKSPACE'] + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath await tar.extractTar(archivePath, CompressionMethod.Zstd) expect(mkdirMock).toHaveBeenCalledWith(workspace) expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${defaultTarPath}"`, [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30', + `"${tarPath}"`, '-xf', IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, '-P', @@ -58,11 +71,61 @@ test('zstd extract tar', async () => { IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace ] .concat(IS_WINDOWS ? ['--force-local'] : []) - .concat(IS_MAC ? ['--delay-directory-restore'] : []), + .concat(IS_MAC ? ['--delay-directory-restore'] : []) + .concat([ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]) + .join(' '), + undefined, {cwd: undefined} ) }) +test('zstd extract tar with windows BSDtar', async () => { + if (IS_WINDOWS) { + const mkdirMock = jest.spyOn(io, 'mkdirP') + const execMock = jest.spyOn(exec, 'exec') + jest + .spyOn(utils, 'getGnuTarPathOnWindows') + .mockReturnValue(Promise.resolve('')) + + const archivePath = `${process.env['windir']}\\fakepath\\cache.tar` + const workspace = process.env['GITHUB_WORKSPACE'] + const tarPath = SystemTarPathOnWindows + + await tar.extractTar(archivePath, CompressionMethod.Zstd) + + expect(mkdirMock).toHaveBeenCalledWith(workspace) + expect(execMock).toHaveBeenCalledTimes(2) + + expect(execMock).toHaveBeenNthCalledWith( + 1, + [ + 'zstd -d --long=30 --force -o', + TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ].join(' '), + undefined, + {cwd: undefined} + ) + + expect(execMock).toHaveBeenNthCalledWith( + 2, + [ + `"${tarPath}"`, + '-xf', + TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workspace?.replace(/\\/g, '/') + ].join(' '), + undefined, + {cwd: undefined} + ) + } +}) + test('gzip extract tar', async () => { const mkdirMock = jest.spyOn(io, 'mkdirP') const execMock = jest.spyOn(exec, 'exec') @@ -74,50 +137,51 @@ test('gzip extract tar', async () => { await tar.extractTar(archivePath, CompressionMethod.Gzip) expect(mkdirMock).toHaveBeenCalledWith(workspace) - const tarPath = IS_WINDOWS - ? `${process.env['windir']}\\System32\\tar.exe` - : defaultTarPath + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${tarPath}"`, [ - '-z', + `"${tarPath}"`, '-xf', IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, '-P', '-C', IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace - ].concat(IS_MAC ? ['--delay-directory-restore'] : []), + ] + .concat(IS_WINDOWS ? ['--force-local'] : []) + .concat(IS_MAC ? 
['--delay-directory-restore'] : []) + .concat(['-z']) + .join(' '), + undefined, {cwd: undefined} ) }) -test('gzip extract GNU tar on windows', async () => { +test('gzip extract GNU tar on windows with GNUtar in path', async () => { if (IS_WINDOWS) { - jest.spyOn(fs, 'existsSync').mockReturnValueOnce(false) - - const isGnuMock = jest - .spyOn(utils, 'isGnuTarInstalled') - .mockReturnValue(Promise.resolve(true)) + // GNU tar present in path but not at default location + jest + .spyOn(utils, 'getGnuTarPathOnWindows') + .mockReturnValue(Promise.resolve('tar')) const execMock = jest.spyOn(exec, 'exec') const archivePath = `${process.env['windir']}\\fakepath\\cache.tar` const workspace = process.env['GITHUB_WORKSPACE'] await tar.extractTar(archivePath, CompressionMethod.Gzip) - expect(isGnuMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"tar"`, [ - '-z', + `"tar"`, '-xf', archivePath.replace(/\\/g, '/'), '-P', '-C', workspace?.replace(/\\/g, '/'), - '--force-local' - ], + '--force-local', + '-z' + ].join(' '), + undefined, {cwd: undefined} ) } @@ -134,13 +198,13 @@ test('zstd create tar', async () => { await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Zstd) + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath + expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${defaultTarPath}"`, [ + `"${tarPath}"`, '--posix', - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30', '-cf', IS_WINDOWS ? CacheFilename.Zstd.replace(/\\/g, '/') : CacheFilename.Zstd, '--exclude', @@ -149,16 +213,81 @@ test('zstd create tar', async () => { '-C', IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace, '--files-from', - 'manifest.txt' + ManifestFilename ] .concat(IS_WINDOWS ? ['--force-local'] : []) - .concat(IS_MAC ? ['--delay-directory-restore'] : []), + .concat(IS_MAC ? ['--delay-directory-restore'] : []) + .concat([ + '--use-compress-program', + IS_WINDOWS ? 
'"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]) + .join(' '), + undefined, // args { cwd: archiveFolder } ) }) +test('zstd create tar with windows BSDtar', async () => { + if (IS_WINDOWS) { + const execMock = jest.spyOn(exec, 'exec') + jest + .spyOn(utils, 'getGnuTarPathOnWindows') + .mockReturnValue(Promise.resolve('')) + + const archiveFolder = getTempDir() + const workspace = process.env['GITHUB_WORKSPACE'] + const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`] + + await fs.promises.mkdir(archiveFolder, {recursive: true}) + + await tar.createTar( + archiveFolder, + sourceDirectories, + CompressionMethod.Zstd + ) + + const tarPath = SystemTarPathOnWindows + + expect(execMock).toHaveBeenCalledTimes(2) + + expect(execMock).toHaveBeenNthCalledWith( + 1, + [ + `"${tarPath}"`, + '--posix', + '-cf', + TarFilename.replace(/\\/g, '/'), + '--exclude', + TarFilename.replace(/\\/g, '/'), + '-P', + '-C', + workspace?.replace(/\\/g, '/'), + '--files-from', + ManifestFilename + ].join(' '), + undefined, // args + { + cwd: archiveFolder + } + ) + + expect(execMock).toHaveBeenNthCalledWith( + 2, + [ + 'zstd -T0 --long=30 --force -o', + CacheFilename.Zstd.replace(/\\/g, '/'), + TarFilename.replace(/\\/g, '/') + ].join(' '), + undefined, // args + { + cwd: archiveFolder + } + ) + } +}) + test('gzip create tar', async () => { const execMock = jest.spyOn(exec, 'exec') @@ -170,16 +299,13 @@ test('gzip create tar', async () => { await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Gzip) - const tarPath = IS_WINDOWS - ? `${process.env['windir']}\\System32\\tar.exe` - : defaultTarPath + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${tarPath}"`, [ + `"${tarPath}"`, '--posix', - '-z', '-cf', IS_WINDOWS ? CacheFilename.Gzip.replace(/\\/g, '/') : CacheFilename.Gzip, '--exclude', @@ -188,8 +314,13 @@ test('gzip create tar', async () => { '-C', IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace, '--files-from', - 'manifest.txt' - ].concat(IS_MAC ? ['--delay-directory-restore'] : []), + ManifestFilename + ] + .concat(IS_WINDOWS ? ['--force-local'] : []) + .concat(IS_MAC ? ['--delay-directory-restore'] : []) + .concat(['-z']) + .join(' '), + undefined, // args { cwd: archiveFolder } @@ -205,22 +336,65 @@ test('zstd list tar', async () => { await tar.listTar(archivePath, CompressionMethod.Zstd) + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${defaultTarPath}"`, [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30', + `"${tarPath}"`, '-tf', IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, '-P' ] .concat(IS_WINDOWS ? ['--force-local'] : []) - .concat(IS_MAC ? ['--delay-directory-restore'] : []), + .concat(IS_MAC ? ['--delay-directory-restore'] : []) + .concat([ + '--use-compress-program', + IS_WINDOWS ? 
'"zstd -d --long=30"' : 'unzstd --long=30' + ]) + .join(' '), + undefined, {cwd: undefined} ) }) +test('zstd list tar with windows BSDtar', async () => { + if (IS_WINDOWS) { + const execMock = jest.spyOn(exec, 'exec') + jest + .spyOn(utils, 'getGnuTarPathOnWindows') + .mockReturnValue(Promise.resolve('')) + const archivePath = `${process.env['windir']}\\fakepath\\cache.tar` + + await tar.listTar(archivePath, CompressionMethod.Zstd) + + const tarPath = SystemTarPathOnWindows + expect(execMock).toHaveBeenCalledTimes(2) + + expect(execMock).toHaveBeenNthCalledWith( + 1, + [ + 'zstd -d --long=30 --force -o', + TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ].join(' '), + undefined, + {cwd: undefined} + ) + + expect(execMock).toHaveBeenNthCalledWith( + 2, + [ + `"${tarPath}"`, + '-tf', + TarFilename.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ].join(' '), + undefined, + {cwd: undefined} + ) + } +}) + test('zstdWithoutLong list tar', async () => { const execMock = jest.spyOn(exec, 'exec') @@ -230,18 +404,20 @@ test('zstdWithoutLong list tar', async () => { await tar.listTar(archivePath, CompressionMethod.ZstdWithoutLong) + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${defaultTarPath}"`, [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d' : 'unzstd', + `"${tarPath}"`, '-tf', IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, '-P' ] .concat(IS_WINDOWS ? ['--force-local'] : []) - .concat(IS_MAC ? ['--delay-directory-restore'] : []), + .concat(IS_MAC ? ['--delay-directory-restore'] : []) + .concat(['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']) + .join(' '), + undefined, {cwd: undefined} ) }) @@ -254,18 +430,20 @@ test('gzip list tar', async () => { await tar.listTar(archivePath, CompressionMethod.Gzip) - const tarPath = IS_WINDOWS - ? `${process.env['windir']}\\System32\\tar.exe` - : defaultTarPath + const tarPath = IS_WINDOWS ? GnuTarPathOnWindows : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${tarPath}"`, [ - '-z', + `"${tarPath}"`, '-tf', IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, '-P' - ].concat(IS_MAC ? ['--delay-directory-restore'] : []), + ] + .concat(IS_WINDOWS ? ['--force-local'] : []) + .concat(IS_MAC ? 
['--delay-directory-restore'] : []) + .concat(['-z']) + .join(' '), + undefined, {cwd: undefined} ) }) diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 52afe053..62c8952c 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -1,12 +1,12 @@ { "name": "@actions/cache", - "version": "3.1.1", + "version": "3.1.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@actions/cache", - "version": "3.1.1", + "version": "3.1.2", "license": "MIT", "dependencies": { "@actions/core": "^1.10.0", diff --git a/packages/cache/package.json b/packages/cache/package.json index ccd37fa1..4852c241 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "3.1.1", + "version": "3.1.2", "preview": true, "description": "Actions cache lib", "keywords": [ diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 609c7f94..ffa13e78 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -60,13 +60,15 @@ export function isFeatureAvailable(): boolean { * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ export async function restoreCache( paths: string[], primaryKey: string, restoreKeys?: string[], - options?: DownloadOptions + options?: DownloadOptions, + enableCrossOsArchive = false ): Promise { checkPaths(paths) @@ -90,9 +92,9 @@ export async function restoreCache( try { // path are needed to compute version const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod + compressionMethod, + enableCrossOsArchive }) - if (!cacheEntry?.archiveLocation) { // Cache not found return undefined @@ -151,13 +153,15 @@ export async function restoreCache( * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ export async function saveCache( paths: string[], key: string, - options?: UploadOptions + options?: UploadOptions, + enableCrossOsArchive = false ): Promise { checkPaths(paths) checkKey(key) @@ -207,6 +211,7 @@ export async function saveCache( paths, { compressionMethod, + enableCrossOsArchive, cacheSize: archiveFileSize } ) diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index d5ecd9a8..e05cac58 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -73,13 +73,21 @@ function createHttpClient(): HttpClient { export function getCacheVersion( paths: string[], - compressionMethod?: CompressionMethod + compressionMethod?: CompressionMethod, + enableCrossOsArchive = false ): string { - const components = paths.concat( - !compressionMethod || compressionMethod === CompressionMethod.Gzip - ? 
[] - : [compressionMethod] - ) + const components = paths + + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod) + } + + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only') + } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt) @@ -96,7 +104,11 @@ export async function getCacheEntry( options?: InternalCacheOptions ): Promise { const httpClient = createHttpClient() - const version = getCacheVersion(paths, options?.compressionMethod) + const version = getCacheVersion( + paths, + options?.compressionMethod, + options?.enableCrossOsArchive + ) const resource = `cache?keys=${encodeURIComponent( keys.join(',') )}&version=${version}` @@ -104,6 +116,7 @@ export async function getCacheEntry( const response = await retryTypedResponse('getCacheEntry', async () => httpClient.getJson(getCacheApiUrl(resource)) ) + // Cache not found if (response.statusCode === 204) { // List cache for primary key only if cache miss occurs if (core.isDebug()) { @@ -118,6 +131,7 @@ export async function getCacheEntry( const cacheResult = response.result const cacheDownloadUrl = cacheResult?.archiveLocation if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.') } core.setSecret(cacheDownloadUrl) @@ -179,7 +193,11 @@ export async function reserveCache( options?: InternalCacheOptions ): Promise> { const httpClient = createHttpClient() - const version = getCacheVersion(paths, options?.compressionMethod) + const version = getCacheVersion( + paths, + options?.compressionMethod, + options?.enableCrossOsArchive + ) const reserveCacheRequest: ReserveCacheRequest = { key, diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts index c2ace526..ea1e7de6 100644 --- a/packages/cache/src/internal/cacheUtils.ts +++ b/packages/cache/src/internal/cacheUtils.ts @@ -7,7 +7,11 @@ import * as path from 'path' import * as semver from 'semver' import * as util from 'util' import {v4 as uuidV4} from 'uuid' -import {CacheFilename, CompressionMethod} from './constants' +import { + CacheFilename, + CompressionMethod, + GnuTarPathOnWindows +} from './constants' // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 export async function createTempDirectory(): Promise { @@ -90,11 +94,6 @@ async function getVersion(app: string): Promise { // Use zstandard if possible to maximize cache performance export async function getCompressionMethod(): Promise { - if (process.platform === 'win32' && !(await isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return CompressionMethod.Gzip - } - const versionOutput = await getVersion('zstd') const version = semver.clean(versionOutput) @@ -116,9 +115,12 @@ export function getCacheFileName(compressionMethod: CompressionMethod): string { : CacheFilename.Zstd } -export async function isGnuTarInstalled(): Promise { +export async function getGnuTarPathOnWindows(): Promise { + if (fs.existsSync(GnuTarPathOnWindows)) { + return GnuTarPathOnWindows + } const versionOutput = await getVersion('tar') - return versionOutput.toLowerCase().includes('gnu tar') + return versionOutput.toLowerCase().includes('gnu tar') ? 
io.which('tar') : '' } export function assertDefined(name: string, value?: T): T { diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts index 2f78d326..4dbff574 100644 --- a/packages/cache/src/internal/constants.ts +++ b/packages/cache/src/internal/constants.ts @@ -11,6 +11,11 @@ export enum CompressionMethod { Zstd = 'zstd' } +export enum ArchiveToolType { + GNU = 'gnu', + BSD = 'bsd' +} + // The default number of retry attempts. export const DefaultRetryAttempts = 2 @@ -21,3 +26,13 @@ export const DefaultRetryDelay = 5000 // over the socket during this period, the socket is destroyed and the download // is aborted. export const SocketTimeout = 5000 + +// The default path of GNUtar on hosted Windows runners +export const GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe` + +// The default path of BSDtar on hosted Windows runners +export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe` + +export const TarFilename = 'cache.tar' + +export const ManifestFilename = 'manifest.txt' diff --git a/packages/cache/src/internal/contracts.d.ts b/packages/cache/src/internal/contracts.d.ts index b5f53bdc..6fcd9427 100644 --- a/packages/cache/src/internal/contracts.d.ts +++ b/packages/cache/src/internal/contracts.d.ts @@ -35,5 +35,11 @@ export interface ReserveCacheResponse { export interface InternalCacheOptions { compressionMethod?: CompressionMethod + enableCrossOsArchive?: boolean cacheSize?: number } + +export interface ArchiveTool { + path: string + type: string +} diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts index 2e28ca1a..8c6337e6 100644 --- a/packages/cache/src/internal/tar.ts +++ b/packages/cache/src/internal/tar.ts @@ -1,27 +1,32 @@ import {exec} from '@actions/exec' +import {exportVariable} from '@actions/core' import * as io from '@actions/io' import {existsSync, writeFileSync} from 'fs' import * as path from 'path' import * as utils from './cacheUtils' -import {CompressionMethod} from './constants' +import {ArchiveTool} from './contracts' +import { + CompressionMethod, + SystemTarPathOnWindows, + ArchiveToolType, + TarFilename, + ManifestFilename +} from './constants' const IS_WINDOWS = process.platform === 'win32' +exportVariable('MSYS', 'winsymlinks:nativestrict') -async function getTarPath( - args: string[], - compressionMethod: CompressionMethod -): Promise { +// Returns tar path and type: BSD or GNU +async function getTarPath(): Promise { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe` - if (compressionMethod !== CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local') + const gnuTar = await utils.getGnuTarPathOnWindows() + const systemTar = SystemTarPathOnWindows + if (gnuTar) { + // Use GNUtar as default on windows + return {path: gnuTar, type: ArchiveToolType.GNU} } else if (existsSync(systemTar)) { - return systemTar - } else if (await utils.isGnuTarInstalled()) { - args.push('--force-local') + return {path: systemTar, type: ArchiveToolType.BSD} } break } @@ -29,27 +34,133 @@ async function getTarPath( const gnuTar = await io.which('gtar', false) if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore') - return 
gnuTar + return {path: gnuTar, type: ArchiveToolType.GNU} + } else { + return { + path: await io.which('tar', true), + type: ArchiveToolType.BSD + } } - break } default: break } - return await io.which('tar', true) + // Default assumption is GNU tar is present in path + return { + path: await io.which('tar', true), + type: ArchiveToolType.GNU + } } -async function execTar( - args: string[], +// Return arguments for tar as per tarPath, compressionMethod, method type and os +async function getTarArgs( + tarPath: ArchiveTool, compressionMethod: CompressionMethod, - cwd?: string -): Promise { - try { - await exec(`"${await getTarPath(args, compressionMethod)}"`, args, {cwd}) - } catch (error) { - throw new Error(`Tar failed with error: ${error?.message}`) + type: string, + archivePath = '' +): Promise { + const args = [`"${tarPath.path}"`] + const cacheFileName = utils.getCacheFileName(compressionMethod) + const tarFile = 'cache.tar' + const workingDirectory = getWorkingDirectory() + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = + tarPath.type === ArchiveToolType.BSD && + compressionMethod !== CompressionMethod.Gzip && + IS_WINDOWS + + // Method specific args + switch (type) { + case 'create': + args.push( + '--posix', + '-cf', + BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--exclude', + BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + ManifestFilename + ) + break + case 'extract': + args.push( + '-xf', + BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ) + break + case 'list': + args.push( + '-tf', + BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ) + break } + + // Platform specific args + if (tarPath.type === ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local') + break + case 'darwin': + args.push('--delay-directory-restore') + break + } + } + + return args +} + +// Returns commands to run tar and compression program +async function getCommands( + compressionMethod: CompressionMethod, + type: string, + archivePath = '' +): Promise { + let args + + const tarPath = await getTarPath() + const tarArgs = await getTarArgs( + tarPath, + compressionMethod, + type, + archivePath + ) + const compressionArgs = + type !== 'create' + ? await getDecompressionProgram(tarPath, compressionMethod, archivePath) + : await getCompressionProgram(tarPath, compressionMethod) + const BSD_TAR_ZSTD = + tarPath.type === ArchiveToolType.BSD && + compressionMethod !== CompressionMethod.Gzip && + IS_WINDOWS + + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')] + } else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')] + } + + if (BSD_TAR_ZSTD) { + return args + } + + return [args.join(' ')] } function getWorkingDirectory(): string { @@ -57,37 +168,107 @@ function getWorkingDirectory(): string { } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod: CompressionMethod): string[] { +async function getDecompressionProgram( + tarPath: ArchiveTool, + compressionMethod: CompressionMethod, + archivePath: string +): Promise { // -d: Decompress. 
// unzstd is equivalent to 'zstd -d' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = + tarPath.type === ArchiveToolType.BSD && + compressionMethod !== CompressionMethod.Gzip && + IS_WINDOWS switch (compressionMethod) { case CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ] + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ] case CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'] + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'] default: return ['-z'] } } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +async function getCompressionProgram( + tarPath: ArchiveTool, + compressionMethod: CompressionMethod +): Promise { + const cacheFileName = utils.getCacheFileName(compressionMethod) + const BSD_TAR_ZSTD = + tarPath.type === ArchiveToolType.BSD && + compressionMethod !== CompressionMethod.Gzip && + IS_WINDOWS + switch (compressionMethod) { + case CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ] + case CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? 
'"zstd -T0"' : 'zstdmt'] + default: + return ['-z'] + } +} + +// Executes all commands as separate processes +async function execCommands(commands: string[], cwd?: string): Promise { + for (const command of commands) { + try { + await exec(command, undefined, {cwd}) + } catch (error) { + throw new Error( + `${command.split(' ')[0]} failed with error: ${error?.message}` + ) + } + } +} + +// List the contents of a tar export async function listTar( archivePath: string, compressionMethod: CompressionMethod ): Promise { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ] - await execTar(args, compressionMethod) + const commands = await getCommands(compressionMethod, 'list', archivePath) + await execCommands(commands) } +// Extract a tar export async function extractTar( archivePath: string, compressionMethod: CompressionMethod @@ -95,61 +276,21 @@ export async function extractTar( // Create directory to extract tar into const workingDirectory = getWorkingDirectory() await io.mkdirP(workingDirectory) - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - await execTar(args, compressionMethod) + const commands = await getCommands(compressionMethod, 'extract', archivePath) + await execCommands(commands) } +// Create a tar export async function createTar( archiveFolder: string, sourceDirectories: string[], compressionMethod: CompressionMethod ): Promise { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt' - const cacheFileName = utils.getCacheFileName(compressionMethod) writeFileSync( - path.join(archiveFolder, manifestFilename), + path.join(archiveFolder, ManifestFilename), sourceDirectories.join('\n') ) - const workingDirectory = getWorkingDirectory() - - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram(): string[] { - switch (compressionMethod) { - case CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ] - case CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'] - default: - return ['-z'] - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ] - await execTar(args, compressionMethod, archiveFolder) + const commands = await getCommands(compressionMethod, 'create') + await execCommands(commands, archiveFolder) }