From 932779cf587ca0cf7eab689300724926f63e63d1 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Wed, 6 May 2020 11:10:18 -0400 Subject: [PATCH 1/7] Initial commit to create @actions/cache package --- README.md | 9 + packages/cache/CONTRIBUTIONS.md | 0 packages/cache/README.md | 1 + packages/cache/RELEASES.md | 5 + .../__tests__/__fixtures__/helloWorld.txt | 1 + .../cache/__tests__/cacheHttpClient.test.ts | 28 ++ packages/cache/__tests__/cacheUtils.test.ts | 177 +++++++++ packages/cache/__tests__/restoreCache.test.ts | 303 ++++++++++++++++ packages/cache/__tests__/saveCache.test.ts | 219 +++++++++++ packages/cache/__tests__/tar.test.ts | 191 ++++++++++ packages/cache/package-lock.json | 32 ++ packages/cache/package.json | 49 +++ packages/cache/src/cache.ts | 169 +++++++++ .../cache/src/internal/cacheHttpClient.ts | 339 ++++++++++++++++++ packages/cache/src/internal/cacheUtils.ts | 104 ++++++ packages/cache/src/internal/constants.ts | 14 + packages/cache/src/internal/contracts.d.ts | 25 ++ packages/cache/src/internal/tar.ts | 86 +++++ packages/cache/tsconfig.json | 11 + 19 files changed, 1763 insertions(+) create mode 100644 packages/cache/CONTRIBUTIONS.md create mode 100644 packages/cache/README.md create mode 100644 packages/cache/RELEASES.md create mode 100644 packages/cache/__tests__/__fixtures__/helloWorld.txt create mode 100644 packages/cache/__tests__/cacheHttpClient.test.ts create mode 100644 packages/cache/__tests__/cacheUtils.test.ts create mode 100644 packages/cache/__tests__/restoreCache.test.ts create mode 100644 packages/cache/__tests__/saveCache.test.ts create mode 100644 packages/cache/__tests__/tar.test.ts create mode 100644 packages/cache/package-lock.json create mode 100644 packages/cache/package.json create mode 100644 packages/cache/src/cache.ts create mode 100644 packages/cache/src/internal/cacheHttpClient.ts create mode 100644 packages/cache/src/internal/cacheUtils.ts create mode 100644 packages/cache/src/internal/constants.ts create mode 100644 packages/cache/src/internal/contracts.d.ts create mode 100644 packages/cache/src/internal/tar.ts create mode 100644 packages/cache/tsconfig.json diff --git a/README.md b/README.md index 3dab6d39..8701ab5c 100644 --- a/README.md +++ b/README.md @@ -82,6 +82,15 @@ $ npm install @actions/artifact --save ```
+:dart: [@actions/cache](packages/cache) + +Provides functions to interact with actions cache. Read more [here](packages/cache) + +```bash +$ npm install @actions/artifact --save +``` +
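As orientation for reviewers, here is a minimal sketch of how the two public exports this patch adds (`restoreCache` and `saveCache` from `packages/cache/src/cache.ts`) could be consumed. The key and path values are illustrative only; note that in this first patch `path` is a single newline-delimited string (a later patch in this series changes it to a list).

```typescript
import {restoreCache, saveCache} from '@actions/cache'

async function run(): Promise<void> {
  const path = 'node_modules' // newline-delimited list of paths to cache
  // Hypothetical primary key (reusing the fixture key from the tests).
  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'

  // Resolves to the matched key on a cache hit, otherwise undefined.
  const hitKey = await restoreCache(path, key, ['Linux-node-'])

  if (hitKey === undefined) {
    // ...install dependencies here, then save for the next run.
    // Resolves to a numeric cacheId, or -1 if the save was skipped.
    const cacheId = await saveCache(path, key)
    console.log(`Saved cache with id ${cacheId}`)
  }
}

run().catch(err => console.error(err))
```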
+ ## Creating an Action with the Toolkit :question: [Choosing an action type](docs/action-types.md) diff --git a/packages/cache/CONTRIBUTIONS.md b/packages/cache/CONTRIBUTIONS.md new file mode 100644 index 00000000..e69de29b diff --git a/packages/cache/README.md b/packages/cache/README.md new file mode 100644 index 00000000..b65c7f34 --- /dev/null +++ b/packages/cache/README.md @@ -0,0 +1 @@ +# `@actions/cache` diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md new file mode 100644 index 00000000..b47fc550 --- /dev/null +++ b/packages/cache/RELEASES.md @@ -0,0 +1,5 @@ +# @actions/cache Releases + +### 0.0.0 + +- Initial release \ No newline at end of file diff --git a/packages/cache/__tests__/__fixtures__/helloWorld.txt b/packages/cache/__tests__/__fixtures__/helloWorld.txt new file mode 100644 index 00000000..95d09f2b --- /dev/null +++ b/packages/cache/__tests__/__fixtures__/helloWorld.txt @@ -0,0 +1 @@ +hello world \ No newline at end of file diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts new file mode 100644 index 00000000..d2165280 --- /dev/null +++ b/packages/cache/__tests__/cacheHttpClient.test.ts @@ -0,0 +1,28 @@ +import {getCacheVersion} from '../src/internal/cacheHttpClient' +import {CompressionMethod} from '../src/internal/constants' + +test('getCacheVersion with path input and compression method undefined returns version', async () => { + const inputPath = 'node_modules' + const result = getCacheVersion(inputPath) + expect(result).toEqual( + 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985' + ) +}) + +test('getCacheVersion with zstd compression returns version', async () => { + const inputPath = 'node_modules' + const result = getCacheVersion(inputPath, CompressionMethod.Zstd) + + expect(result).toEqual( + '273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24' + ) +}) + +test('getCacheVersion with gzip compression does not change vesion', async () => { + const inputPath = 'node_modules' + const result = getCacheVersion(inputPath, CompressionMethod.Gzip) + + expect(result).toEqual( + 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985' + ) +}) diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts new file mode 100644 index 00000000..b09eed13 --- /dev/null +++ b/packages/cache/__tests__/cacheUtils.test.ts @@ -0,0 +1,177 @@ +import * as core from '@actions/core' +import * as io from '@actions/io' +import {promises as fs} from 'fs' +import * as os from 'os' +import * as path from 'path' +import {v4 as uuidV4} from 'uuid' +import * as cacheUtils from '../src/internal/cacheUtils' + +jest.mock('@actions/core') +jest.mock('os') + +function getTempDir(): string { + return path.join(__dirname, '_temp', 'cacheUtils') +} + +afterAll(async () => { + delete process.env['GITHUB_WORKSPACE'] + await io.rmRF(getTempDir()) +}) + +test('getArchiveFileSize returns file size', () => { + const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt') + + const size = cacheUtils.getArchiveFileSize(filePath) + + expect(size).toBe(11) +}) + +test('logWarning logs a message with a warning prefix', () => { + const message = 'A warning occurred.' 
+ + const infoMock = jest.spyOn(core, 'info') + + cacheUtils.logWarning(message) + + expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`) +}) + +test('resolvePaths with no ~ in path', async () => { + const filePath = '.cache' + + // Create the following layout: + // cwd + // cwd/.cache + // cwd/.cache/file.txt + + const root = path.join(getTempDir(), 'no-tilde') + // tarball entries will be relative to workspace + process.env['GITHUB_WORKSPACE'] = root + + await fs.mkdir(root, {recursive: true}) + const cache = path.join(root, '.cache') + await fs.mkdir(cache, {recursive: true}) + await fs.writeFile(path.join(cache, 'file.txt'), 'cached') + + const originalCwd = process.cwd() + + try { + process.chdir(root) + + const resolvedPath = await cacheUtils.resolvePaths([filePath]) + + const expectedPath = [filePath] + expect(resolvedPath).toStrictEqual(expectedPath) + } finally { + process.chdir(originalCwd) + } +}) + +test('resolvePaths with ~ in path', async () => { + const cacheDir = uuidV4() + const filePath = `~/${cacheDir}` + // Create the following layout: + // ~/uuid + // ~/uuid/file.txt + + const homedir = jest.requireActual('os').homedir() + const homedirMock = jest.spyOn(os, 'homedir') + homedirMock.mockReturnValue(homedir) + + const target = path.join(homedir, cacheDir) + await fs.mkdir(target, {recursive: true}) + await fs.writeFile(path.join(target, 'file.txt'), 'cached') + + const root = getTempDir() + process.env['GITHUB_WORKSPACE'] = root + + try { + const resolvedPath = await cacheUtils.resolvePaths([filePath]) + + const expectedPath = [path.relative(root, target)] + expect(resolvedPath).toStrictEqual(expectedPath) + } finally { + await io.rmRF(target) + } +}) + +test('resolvePaths with home not found', async () => { + const filePath = '~/.cache/yarn' + const homedirMock = jest.spyOn(os, 'homedir') + homedirMock.mockReturnValue('') + + await expect(cacheUtils.resolvePaths([filePath])).rejects.toThrow( + 'Unable to determine HOME directory' + ) +}) + +test('resolvePaths inclusion pattern returns found', async () => { + const pattern = '*.ts' + // Create the following layout: + // inclusion-patterns + // inclusion-patterns/miss.txt + // inclusion-patterns/test.ts + + const root = path.join(getTempDir(), 'inclusion-patterns') + // tarball entries will be relative to workspace + process.env['GITHUB_WORKSPACE'] = root + + await fs.mkdir(root, {recursive: true}) + await fs.writeFile(path.join(root, 'miss.txt'), 'no match') + await fs.writeFile(path.join(root, 'test.ts'), 'match') + + const originalCwd = process.cwd() + + try { + process.chdir(root) + + const resolvedPath = await cacheUtils.resolvePaths([pattern]) + + const expectedPath = ['test.ts'] + expect(resolvedPath).toStrictEqual(expectedPath) + } finally { + process.chdir(originalCwd) + } +}) + +test('resolvePaths exclusion pattern returns not found', async () => { + const patterns = ['*.ts', '!test.ts'] + // Create the following layout: + // exclusion-patterns + // exclusion-patterns/miss.txt + // exclusion-patterns/test.ts + + const root = path.join(getTempDir(), 'exclusion-patterns') + // tarball entries will be relative to workspace + process.env['GITHUB_WORKSPACE'] = root + + await fs.mkdir(root, {recursive: true}) + await fs.writeFile(path.join(root, 'miss.txt'), 'no match') + await fs.writeFile(path.join(root, 'test.ts'), 'no match') + + const originalCwd = process.cwd() + + try { + process.chdir(root) + + const resolvedPath = await cacheUtils.resolvePaths(patterns) + + const expectedPath: string[] = [] + 
expect(resolvedPath).toStrictEqual(expectedPath) + } finally { + process.chdir(originalCwd) + } +}) + +test('unlinkFile unlinks file', async () => { + const testDirectory = await fs.mkdtemp('unlinkFileTest') + const testFile = path.join(testDirectory, 'test.txt') + await fs.writeFile(testFile, 'hello world') + + await cacheUtils.unlinkFile(testFile) + + // This should throw as testFile should not exist + await expect(fs.stat(testFile)).rejects.toThrow() + + await fs.rmdir(testDirectory) +}) diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts new file mode 100644 index 00000000..d1f016d6 --- /dev/null +++ b/packages/cache/__tests__/restoreCache.test.ts @@ -0,0 +1,303 @@ +import * as core from '@actions/core' +import * as path from 'path' +import {restoreCache} from '../src/cache' +import * as cacheHttpClient from '../src/internal/cacheHttpClient' +import * as cacheUtils from '../src/internal/cacheUtils' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import {ArtifactCacheEntry} from '../src/internal/contracts' +import * as tar from '../src/internal/tar' + +jest.mock('../src/internal/cacheHttpClient') +jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/tar') + +beforeAll(() => { + // eslint-disable-next-line @typescript-eslint/promise-function-async + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) +}) + +test('restore with no path should fail', async () => { + const inputPath = '' + const key = 'node-test' + const failedMock = jest.spyOn(core, 'setFailed') + await restoreCache(inputPath, key) + expect(failedMock).toHaveBeenCalledWith( + 'Input required and not supplied: path' + ) +}) + +test('restore with too many keys should fail', async () => { + const inputPath = 'node_modules' + const key = 'node-test' + const restoreKeys = [...Array(20).keys()].map(x => x.toString()) + const failedMock = jest.spyOn(core, 'setFailed') + await restoreCache(inputPath, key, restoreKeys) + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) +}) + +test('restore with large key should fail', async () => { + const inputPath = 'node_modules' + const key = 'foo'.repeat(512) // Over the 512 character limit + const failedMock = jest.spyOn(core, 'setFailed') + await restoreCache(inputPath, key) + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) +}) + +test('restore with invalid key should fail', async () => { + const inputPath = 'node_modules' + const key = 'comma,comma' + const failedMock = jest.spyOn(core, 'setFailed') + await restoreCache(inputPath, key) + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: ${key} cannot contain commas.` + ) +}) + +test('restore with no cache found', async () => { + const inputPath = 'node_modules' + const key = 'node-test' + + const infoMock = jest.spyOn(core, 'info') + const failedMock = jest.spyOn(core, 'setFailed') + + const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + clientMock.mockImplementation(async () => { + return Promise.resolve(null) + }) + + await restoreCache(inputPath, key) + + expect(failedMock).toHaveBeenCalledTimes(0) + expect(infoMock).toHaveBeenCalledWith( + `Cache not found for input keys: ${key}` + ) +}) + +test('restore with server error should fail', async () => { + 
const inputPath = 'node_modules' + const key = 'node-test' + + const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') + const failedMock = jest.spyOn(core, 'setFailed') + + const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + clientMock.mockImplementation(() => { + throw new Error('HTTP Error Occurred') + }) + + await restoreCache(inputPath, key) + + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred') + expect(failedMock).toHaveBeenCalledTimes(0) +}) + +test('restore with restore keys and no cache found', async () => { + const inputPath = 'node_modules' + const key = 'node-test' + const restoreKey = 'node-' + + const infoMock = jest.spyOn(core, 'info') + const failedMock = jest.spyOn(core, 'setFailed') + + const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + clientMock.mockImplementation(async () => { + return Promise.resolve(null) + }) + + await restoreCache(inputPath, key, [restoreKey]) + + expect(failedMock).toHaveBeenCalledTimes(0) + expect(infoMock).toHaveBeenCalledWith( + `Cache not found for input keys: ${key}, ${restoreKey}` + ) +}) + +test('restore with gzip compressed cache found', async () => { + const inputPath = 'node_modules' + const key = 'node-test' + + const infoMock = jest.spyOn(core, 'info') + const failedMock = jest.spyOn(core, 'setFailed') + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: key, + scope: 'refs/heads/master', + archiveLocation: 'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + + const tempPath = '/foo/bar' + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) + + const archivePath = path.join(tempPath, CacheFilename.Gzip) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const fileSize = 142 + const getArchiveFileSizeMock = jest + .spyOn(cacheUtils, 'getArchiveFileSize') + .mockReturnValue(fileSize) + + const extractTarMock = jest.spyOn(tar, 'extractTar') + const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile') + + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await restoreCache(inputPath, key) + + expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, { + compressionMethod: compression + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath + ) + expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + + expect(unlinkFileMock).toHaveBeenCalledTimes(1) + expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) + + expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`) + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('restore with a pull request event and zstd compressed cache found', async () => { + const inputPath = 'node_modules' + const key = 'node-test' + + const infoMock = jest.spyOn(core, 'info') + const failedMock = jest.spyOn(core, 'setFailed') + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: 
key, + scope: 'refs/heads/master', + archiveLocation: 'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + const tempPath = '/foo/bar' + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) + + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const fileSize = 62915000 + const getArchiveFileSizeMock = jest + .spyOn(cacheUtils, 'getArchiveFileSize') + .mockReturnValue(fileSize) + + const extractTarMock = jest.spyOn(tar, 'extractTar') + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await restoreCache(inputPath, key) + + expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, { + compressionMethod: compression + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath + ) + expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath) + expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + + expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`) + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('restore with cache found for restore key', async () => { + const inputPath = 'node_modules' + const key = 'node-test' + const restoreKey = 'node-' + + const infoMock = jest.spyOn(core, 'info') + const failedMock = jest.spyOn(core, 'setFailed') + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: restoreKey, + scope: 'refs/heads/master', + archiveLocation: 'www.actionscache.test/download' + } + const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') + getCacheMock.mockImplementation(async () => { + return Promise.resolve(cacheEntry) + }) + const tempPath = '/foo/bar' + + const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory') + createTempDirectoryMock.mockImplementation(async () => { + return Promise.resolve(tempPath) + }) + + const archivePath = path.join(tempPath, CacheFilename.Zstd) + const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') + + const fileSize = 142 + const getArchiveFileSizeMock = jest + .spyOn(cacheUtils, 'getArchiveFileSize') + .mockReturnValue(fileSize) + + const extractTarMock = jest.spyOn(tar, 'extractTar') + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await restoreCache(inputPath, key, [restoreKey]) + + expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], inputPath, { + compressionMethod: compression + }) + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath + ) + expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath) + expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) + + expect(extractTarMock).toHaveBeenCalledTimes(1) + 
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) + + expect(infoMock).toHaveBeenCalledWith( + `Cache restored from key: ${restoreKey}` + ) + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts new file mode 100644 index 00000000..f1346634 --- /dev/null +++ b/packages/cache/__tests__/saveCache.test.ts @@ -0,0 +1,219 @@ +import * as core from '@actions/core' +import * as path from 'path' +import {saveCache} from '../src/cache' +import * as cacheHttpClient from '../src/internal/cacheHttpClient' +import * as cacheUtils from '../src/internal/cacheUtils' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import * as tar from '../src/internal/tar' + +jest.mock('@actions/core') +jest.mock('../src/internal/cacheHttpClient') +jest.mock('../src/internal/cacheUtils') +jest.mock('../src/internal/tar') + +beforeAll(() => { + // eslint-disable-next-line @typescript-eslint/promise-function-async + jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { + const actualUtils = jest.requireActual('../src/internal/cacheUtils') + return actualUtils.getCacheFileName(cm) + }) + + jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => { + return filePaths.map(x => path.resolve(x)) + }) + + jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => { + return Promise.resolve('/foo/bar') + }) +}) + +test('save with missing input outputs warning', async () => { + const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') + const failedMock = jest.spyOn(core, 'setFailed') + + const inputPath = '' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + + await saveCache(inputPath, primaryKey) + + expect(logWarningMock).toHaveBeenCalledWith( + 'Input required and not supplied: path' + ) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(failedMock).toHaveBeenCalledTimes(0) +}) + +test('save with large cache outputs warning', async () => { + const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') + const failedMock = jest.spyOn(core, 'setFailed') + + const inputPath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(inputPath)] + + const createTarMock = jest.spyOn(tar, 'createTar') + + const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit + jest.spyOn(cacheUtils, 'getArchiveFileSize').mockImplementationOnce(() => { + return cacheSize + }) + const compression = CompressionMethod.Gzip + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await saveCache(inputPath, primaryKey) + + const archiveFolder = '/foo/bar' + + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith( + 'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.' 
+ ) + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('save with reserve cache failure outputs warning', async () => { + const infoMock = jest.spyOn(core, 'info') + const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') + const failedMock = jest.spyOn(core, 'setFailed') + + const inputPath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + + const reserveCacheMock = jest + .spyOn(cacheHttpClient, 'reserveCache') + .mockImplementation(async () => { + return -1 + }) + + const createTarMock = jest.spyOn(tar, 'createTar') + const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await saveCache(inputPath, primaryKey) + + expect(reserveCacheMock).toHaveBeenCalledTimes(1) + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, { + compressionMethod: compression + }) + + expect(infoMock).toHaveBeenCalledWith( + `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + ) + + expect(createTarMock).toHaveBeenCalledTimes(0) + expect(saveCacheMock).toHaveBeenCalledTimes(0) + expect(logWarningMock).toHaveBeenCalledTimes(0) + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('save with server error outputs warning', async () => { + const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') + const failedMock = jest.spyOn(core, 'setFailed') + + const inputPath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(inputPath)] + + const cacheId = 4 + const reserveCacheMock = jest + .spyOn(cacheHttpClient, 'reserveCache') + .mockImplementation(async () => { + return cacheId + }) + + const createTarMock = jest.spyOn(tar, 'createTar') + + const saveCacheMock = jest + .spyOn(cacheHttpClient, 'saveCache') + .mockImplementationOnce(async () => { + throw new Error('HTTP Error Occurred') + }) + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await saveCache(inputPath, primaryKey) + + expect(reserveCacheMock).toHaveBeenCalledTimes(1) + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, { + compressionMethod: compression + }) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) + + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + + expect(saveCacheMock).toHaveBeenCalledTimes(1) + expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile) + + expect(logWarningMock).toHaveBeenCalledTimes(1) + expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred') + + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) + +test('save with valid inputs uploads a cache', async () => { + const failedMock = jest.spyOn(core, 'setFailed') + + const inputPath = 'node_modules' + const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' + const cachePaths = [path.resolve(inputPath)] + + const cacheId = 4 + const reserveCacheMock = jest + .spyOn(cacheHttpClient, 'reserveCache') + .mockImplementation(async () => 
{ + return cacheId + }) + const createTarMock = jest.spyOn(tar, 'createTar') + + const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache') + const compression = CompressionMethod.Zstd + const getCompressionMock = jest + .spyOn(cacheUtils, 'getCompressionMethod') + .mockReturnValue(Promise.resolve(compression)) + + await saveCache(inputPath, primaryKey) + + expect(reserveCacheMock).toHaveBeenCalledTimes(1) + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, { + compressionMethod: compression + }) + + const archiveFolder = '/foo/bar' + const archiveFile = path.join(archiveFolder, CacheFilename.Zstd) + + expect(createTarMock).toHaveBeenCalledTimes(1) + expect(createTarMock).toHaveBeenCalledWith( + archiveFolder, + cachePaths, + compression + ) + + expect(saveCacheMock).toHaveBeenCalledTimes(1) + expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile) + + expect(failedMock).toHaveBeenCalledTimes(0) + expect(getCompressionMock).toHaveBeenCalledTimes(1) +}) diff --git a/packages/cache/__tests__/tar.test.ts b/packages/cache/__tests__/tar.test.ts new file mode 100644 index 00000000..0aa6c784 --- /dev/null +++ b/packages/cache/__tests__/tar.test.ts @@ -0,0 +1,191 @@ +import * as exec from '@actions/exec' +import * as io from '@actions/io' +import * as path from 'path' +import {CacheFilename, CompressionMethod} from '../src/internal/constants' +import * as tar from '../src/internal/tar' +import * as utils from '../src/internal/cacheUtils' +// eslint-disable-next-line @typescript-eslint/no-require-imports +import fs = require('fs') + +jest.mock('@actions/exec') +jest.mock('@actions/io') + +const IS_WINDOWS = process.platform === 'win32' + +function getTempDir(): string { + return path.join(__dirname, '_temp', 'tar') +} + +beforeAll(async () => { + jest.spyOn(io, 'which').mockImplementation(async tool => { + return tool + }) + + process.env['GITHUB_WORKSPACE'] = process.cwd() + await jest.requireActual('@actions/io').rmRF(getTempDir()) +}) + +afterAll(async () => { + delete process.env['GITHUB_WORKSPACE'] + await jest.requireActual('@actions/io').rmRF(getTempDir()) +}) + +test('zstd extract tar', async () => { + const mkdirMock = jest.spyOn(io, 'mkdirP') + const execMock = jest.spyOn(exec, 'exec') + + const archivePath = IS_WINDOWS + ? `${process.env['windir']}\\fakepath\\cache.tar` + : 'cache.tar' + const workspace = process.env['GITHUB_WORKSPACE'] + + await tar.extractTar(archivePath, CompressionMethod.Zstd) + + expect(mkdirMock).toHaveBeenCalledWith(workspace) + const tarPath = IS_WINDOWS + ? `${process.env['windir']}\\System32\\tar.exe` + : 'tar' + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${tarPath}"`, + [ + '--use-compress-program', + 'zstd -d --long=30', + '-xf', + IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, + '-P', + '-C', + IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace + ], + {cwd: undefined} + ) +}) + +test('gzip extract tar', async () => { + const mkdirMock = jest.spyOn(io, 'mkdirP') + const execMock = jest.spyOn(exec, 'exec') + const archivePath = IS_WINDOWS + ? `${process.env['windir']}\\fakepath\\cache.tar` + : 'cache.tar' + const workspace = process.env['GITHUB_WORKSPACE'] + + await tar.extractTar(archivePath, CompressionMethod.Gzip) + + expect(mkdirMock).toHaveBeenCalledWith(workspace) + const tarPath = IS_WINDOWS + ? 
`${process.env['windir']}\\System32\\tar.exe` + : 'tar' + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${tarPath}"`, + [ + '-z', + '-xf', + IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, + '-P', + '-C', + IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace + ], + {cwd: undefined} + ) +}) + +test('gzip extract GNU tar on windows', async () => { + if (IS_WINDOWS) { + jest.spyOn(fs, 'existsSync').mockReturnValueOnce(false) + + const isGnuMock = jest + .spyOn(utils, 'useGnuTar') + .mockReturnValue(Promise.resolve(true)) + const execMock = jest.spyOn(exec, 'exec') + const archivePath = `${process.env['windir']}\\fakepath\\cache.tar` + const workspace = process.env['GITHUB_WORKSPACE'] + + await tar.extractTar(archivePath, CompressionMethod.Gzip) + + expect(isGnuMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"tar"`, + [ + '-z', + '-xf', + archivePath.replace(/\\/g, '/'), + '-P', + '-C', + workspace?.replace(/\\/g, '/'), + '--force-local' + ], + {cwd: undefined} + ) + } +}) + +test('zstd create tar', async () => { + const execMock = jest.spyOn(exec, 'exec') + + const archiveFolder = getTempDir() + const workspace = process.env['GITHUB_WORKSPACE'] + const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`] + + await fs.promises.mkdir(archiveFolder, {recursive: true}) + + await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Zstd) + + const tarPath = IS_WINDOWS + ? `${process.env['windir']}\\System32\\tar.exe` + : 'tar' + + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${tarPath}"`, + [ + '--use-compress-program', + 'zstd -T0 --long=30', + '-cf', + IS_WINDOWS ? CacheFilename.Zstd.replace(/\\/g, '/') : CacheFilename.Zstd, + '-P', + '-C', + IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace, + '--files-from', + 'manifest.txt' + ], + { + cwd: archiveFolder + } + ) +}) + +test('gzip create tar', async () => { + const execMock = jest.spyOn(exec, 'exec') + + const archiveFolder = getTempDir() + const workspace = process.env['GITHUB_WORKSPACE'] + const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`] + + await fs.promises.mkdir(archiveFolder, {recursive: true}) + + await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Gzip) + + const tarPath = IS_WINDOWS + ? `${process.env['windir']}\\System32\\tar.exe` + : 'tar' + + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${tarPath}"`, + [ + '-z', + '-cf', + IS_WINDOWS ? CacheFilename.Gzip.replace(/\\/g, '/') : CacheFilename.Gzip, + '-P', + '-C', + IS_WINDOWS ? 
workspace?.replace(/\\/g, '/') : workspace, + '--files-from', + 'manifest.txt' + ], + { + cwd: archiveFolder + } + ) +}) diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json new file mode 100644 index 00000000..4a263beb --- /dev/null +++ b/packages/cache/package-lock.json @@ -0,0 +1,32 @@ +{ + "name": "@actions/cache", + "version": "0.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@actions/http-client": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz", + "integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==", + "requires": { + "tunnel": "0.0.6" + } + }, + "@types/uuid": { + "version": "3.4.9", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.9.tgz", + "integrity": "sha512-XDwyIlt/47l2kWLTzw/mtrpLdB+GPSskR2n/PIcPn+VYhVO77rGhRncIR5GPU0KRzXuqkDO+J5qqrG0Y8P6jzQ==", + "dev": true + }, + "tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" + }, + "uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + } + } +} diff --git a/packages/cache/package.json b/packages/cache/package.json new file mode 100644 index 00000000..1454048c --- /dev/null +++ b/packages/cache/package.json @@ -0,0 +1,49 @@ +{ + "name": "@actions/cache", + "version": "0.0.0", + "preview": true, + "description": "Actions artifact cache lib", + "keywords": [ + "github", + "actions", + "cache" + ], + "homepage": "https://github.com/actions/toolkit/tree/master/packages/cache", + "license": "MIT", + "main": "lib/cache.js", + "types": "lib/cache.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/cache" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --audit-level=moderate", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.4", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^1.0.8", + "@actions/io": "^1.0.1", + "uuid": "^3.3.3" + }, + "devDependencies": { + "@types/uuid": "^3.4.5" + } +} diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts new file mode 100644 index 00000000..4aab741e --- /dev/null +++ b/packages/cache/src/cache.ts @@ -0,0 +1,169 @@ +import * as core from '@actions/core' +import * as pathUtils from 'path' +import * as utils from './internal/cacheUtils' +import * as cacheHttpClient from './internal/cacheHttpClient' +import {createTar, extractTar} from './internal/tar' + +/** + * Restores cache from keys + * + * @param path a string representing files that were cached + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @returns string returns the key for the cache hit, otherwise return undefined + */ +export async function restoreCache( + path: string, + primaryKey: string, + 
restoreKeys?: string[] +): Promise { + try { + if (!path || path.length === 0) { + throw new Error('Input required and not supplied: path') + } + + restoreKeys = restoreKeys || [] + const keys = [primaryKey, ...restoreKeys] + + core.debug('Resolved Keys:') + core.debug(JSON.stringify(keys)) + + if (keys.length > 10) { + core.setFailed( + `Key Validation Error: Keys are limited to a maximum of 10.` + ) + return undefined + } + for (const key of keys) { + if (key.length > 512) { + core.setFailed( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ) + return undefined + } + const regex = /^[^,]*$/ + if (!regex.test(key)) { + core.setFailed(`Key Validation Error: ${key} cannot contain commas.`) + return undefined + } + } + + const compressionMethod = await utils.getCompressionMethod() + + try { + // path are needed to compute version + const cacheEntry = await cacheHttpClient.getCacheEntry(keys, path, { + compressionMethod + }) + if (!cacheEntry?.archiveLocation) { + core.info(`Cache not found for input keys: ${keys.join(', ')}`) + return undefined + } + + const archivePath = pathUtils.join( + await utils.createTempDirectory(), + utils.getCacheFileName(compressionMethod) + ) + core.debug(`Archive Path: ${archivePath}`) + + try { + // Download the cache from the cache entry + await cacheHttpClient.downloadCache( + cacheEntry.archiveLocation, + archivePath + ) + + const archiveFileSize = utils.getArchiveFileSize(archivePath) + core.info( + `Cache Size: ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B)` + ) + + await extractTar(archivePath, compressionMethod) + } finally { + // Try to delete the archive to save space + try { + await utils.unlinkFile(archivePath) + } catch (error) { + core.debug(`Failed to delete archive: ${error}`) + } + } + + core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`) + + return cacheEntry.cacheKey + } catch (error) { + utils.logWarning(error.message) + return undefined + } + } catch (error) { + core.setFailed(error.message) + return undefined + } +} + +/** + * Saves a file with the specified key + * + * @param path a string representing files to be cached + * @param key an explicit key for restoring the cache + * @returns number returns cacheId if the cache was saved successfully, otherwise return -1 + */ +export async function saveCache(path: string, key: string): Promise { + try { + if (!path || path.length === 0) { + throw new Error('Input required and not supplied: path') + } + + const compressionMethod = await utils.getCompressionMethod() + + core.debug('Reserving Cache') + const cacheId = await cacheHttpClient.reserveCache(key, path, { + compressionMethod + }) + if (cacheId === -1) { + core.info( + `Unable to reserve cache with key ${key}, another job may be creating this cache.` + ) + return -1 + } + core.debug(`Cache ID: ${cacheId}`) + const cachePaths = await utils.resolvePaths( + path.split('\n').filter(x => x !== '') + ) + + core.debug('Cache Paths:') + core.debug(`${JSON.stringify(cachePaths)}`) + + const archiveFolder = await utils.createTempDirectory() + const archivePath = pathUtils.join( + archiveFolder, + utils.getCacheFileName(compressionMethod) + ) + + core.debug(`Archive Path: ${archivePath}`) + + await createTar(archiveFolder, cachePaths, compressionMethod) + + const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit + const archiveFileSize = utils.getArchiveFileSize(archivePath) + core.debug(`File Size: ${archiveFileSize}`) + if (archiveFileSize > fileSizeLimit) { 
+ utils.logWarning( + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.` + ) + return -1 + } + + core.debug(`Saving Cache (ID: ${cacheId})`) + await cacheHttpClient.saveCache(cacheId, archivePath) + + return cacheId + } catch (error) { + utils.logWarning(error.message) + return -1 + } +} diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts new file mode 100644 index 00000000..92e9498a --- /dev/null +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -0,0 +1,339 @@ +import * as core from '@actions/core' +import {HttpClient, HttpCodes} from '@actions/http-client' +import {BearerCredentialHandler} from '@actions/http-client/auth' +import { + IHttpClientResponse, + IRequestOptions, + ITypedResponse +} from '@actions/http-client/interfaces' +import * as crypto from 'crypto' +import * as fs from 'fs' +import * as stream from 'stream' +import * as util from 'util' + +import * as utils from './cacheUtils' +import {CompressionMethod, SocketTimeout} from './constants' +import { + ArtifactCacheEntry, + CacheOptions, + CommitCacheRequest, + ReserveCacheRequest, + ReserveCacheResponse +} from './contracts' + +const versionSalt = '1.0' + +function isSuccessStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + return statusCode >= 200 && statusCode < 300 +} + +function isRetryableStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + const retryableStatusCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout + ] + return retryableStatusCodes.includes(statusCode) +} + +function getCacheApiUrl(resource: string): string { + // Ideally we just use ACTIONS_CACHE_URL + const baseUrl: string = ( + process.env['ACTIONS_CACHE_URL'] || + process.env['ACTIONS_RUNTIME_URL'] || + '' + ).replace('pipelines', 'artifactcache') + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.') + } + + const url = `${baseUrl}_apis/artifactcache/${resource}` + core.debug(`Resource Url: ${url}`) + return url +} + +function createAcceptHeader(type: string, apiVersion: string): string { + return `${type};api-version=${apiVersion}` +} + +function getRequestOptions(): IRequestOptions { + const requestOptions: IRequestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + } + + return requestOptions +} + +function createHttpClient(): HttpClient { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '' + const bearerCredentialHandler = new BearerCredentialHandler(token) + + return new HttpClient( + 'actions/cache', + [bearerCredentialHandler], + getRequestOptions() + ) +} + +export function getCacheVersion( + inputPath: string, + compressionMethod?: CompressionMethod +): string { + const components = [inputPath].concat( + compressionMethod === CompressionMethod.Zstd ? 
[compressionMethod] : [] + ) + + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt) + + return crypto + .createHash('sha256') + .update(components.join('|')) + .digest('hex') +} + +export async function getCacheEntry( + keys: string[], + inputPath: string, + options?: CacheOptions +): Promise { + const httpClient = createHttpClient() + const version = getCacheVersion(inputPath, options?.compressionMethod) + const resource = `cache?keys=${encodeURIComponent( + keys.join(',') + )}&version=${version}` + + const response = await httpClient.getJson( + getCacheApiUrl(resource) + ) + if (response.statusCode === 204) { + return null + } + if (!isSuccessStatusCode(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`) + } + + const cacheResult = response.result + const cacheDownloadUrl = cacheResult?.archiveLocation + if (!cacheDownloadUrl) { + throw new Error('Cache not found.') + } + core.setSecret(cacheDownloadUrl) + core.debug(`Cache Result:`) + core.debug(JSON.stringify(cacheResult)) + + return cacheResult +} + +async function pipeResponseToStream( + response: IHttpClientResponse, + output: NodeJS.WritableStream +): Promise { + const pipeline = util.promisify(stream.pipeline) + await pipeline(response.message, output) +} + +export async function downloadCache( + archiveLocation: string, + archivePath: string +): Promise { + const writableStream = fs.createWriteStream(archivePath) + const httpClient = new HttpClient('actions/cache') + const downloadResponse = await httpClient.get(archiveLocation) + + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(SocketTimeout, () => { + downloadResponse.message.destroy() + core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`) + }) + + await pipeResponseToStream(downloadResponse, writableStream) + + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length'] + + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader) + const actualLength = utils.getArchiveFileSize(archivePath) + + if (actualLength !== expectedLength) { + throw new Error( + `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}` + ) + } + } else { + core.debug('Unable to validate download, no Content-Length header') + } +} + +// Reserve Cache +export async function reserveCache( + key: string, + inputPath: string, + options?: CacheOptions +): Promise { + const httpClient = createHttpClient() + const version = getCacheVersion(inputPath, options?.compressionMethod) + + const reserveCacheRequest: ReserveCacheRequest = { + key, + version + } + const response = await httpClient.postJson( + getCacheApiUrl('caches'), + reserveCacheRequest + ) + return response?.result?.cacheId ?? 
-1 +} + +function getContentRange(start: number, end: number): string { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*` +} + +async function uploadChunk( + httpClient: HttpClient, + resourceUrl: string, + data: NodeJS.ReadableStream, + start: number, + end: number +): Promise { + core.debug( + `Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange( + start, + end + )}` + ) + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + } + + const uploadChunkRequest = async (): Promise => { + return await httpClient.sendStream( + 'PATCH', + resourceUrl, + data, + additionalHeaders + ) + } + + const response = await uploadChunkRequest() + if (isSuccessStatusCode(response.message.statusCode)) { + return + } + + if (isRetryableStatusCode(response.message.statusCode)) { + core.debug( + `Received ${response.message.statusCode}, retrying chunk at offset ${start}.` + ) + const retryResponse = await uploadChunkRequest() + if (isSuccessStatusCode(retryResponse.message.statusCode)) { + return + } + } + + throw new Error( + `Cache service responded with ${response.message.statusCode} during chunk upload.` + ) +} + +function parseEnvNumber(key: string): number | undefined { + const value = Number(process.env[key]) + if (Number.isNaN(value) || value < 0) { + return undefined + } + return value +} + +async function uploadFile( + httpClient: HttpClient, + cacheId: number, + archivePath: string +): Promise { + // Upload Chunks + const fileSize = fs.statSync(archivePath).size + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`) + const fd = fs.openSync(archivePath, 'r') + + const concurrency = parseEnvNumber('CACHE_UPLOAD_CONCURRENCY') ?? 4 // # of HTTP requests in parallel + const MAX_CHUNK_SIZE = + parseEnvNumber('CACHE_UPLOAD_CHUNK_SIZE') ?? 
32 * 1024 * 1024 // 32 MB Chunks + core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`) + + const parallelUploads = [...new Array(concurrency).keys()] + core.debug('Awaiting all uploads') + let offset = 0 + + try { + await Promise.all( + parallelUploads.map(async () => { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE) + const start = offset + const end = offset + chunkSize - 1 + offset += MAX_CHUNK_SIZE + const chunk = fs.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + + await uploadChunk(httpClient, resourceUrl, chunk, start, end) + } + }) + ) + } finally { + fs.closeSync(fd) + } + return +} + +async function commitCache( + httpClient: HttpClient, + cacheId: number, + filesize: number +): Promise> { + const commitCacheRequest: CommitCacheRequest = {size: filesize} + return await httpClient.postJson( + getCacheApiUrl(`caches/${cacheId.toString()}`), + commitCacheRequest + ) +} + +export async function saveCache( + cacheId: number, + archivePath: string +): Promise { + const httpClient = createHttpClient() + + core.debug('Upload cache') + await uploadFile(httpClient, cacheId, archivePath) + + // Commit Cache + core.debug('Commiting cache') + const cacheSize = utils.getArchiveFileSize(archivePath) + const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize) + if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error( + `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` + ) + } + + core.info('Cache saved successfully') +} diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts new file mode 100644 index 00000000..8cce071f --- /dev/null +++ b/packages/cache/src/internal/cacheUtils.ts @@ -0,0 +1,104 @@ +import * as core from '@actions/core' +import * as exec from '@actions/exec' +import * as glob from '@actions/glob' +import * as io from '@actions/io' +import * as fs from 'fs' +import * as path from 'path' +import * as util from 'util' +import {v4 as uuidV4} from 'uuid' +import {CacheFilename, CompressionMethod} from './constants' + +// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 +export async function createTempDirectory(): Promise { + const IS_WINDOWS = process.platform === 'win32' + + let tempDirectory: string = process.env['RUNNER_TEMP'] || '' + + if (!tempDirectory) { + let baseLocation: string + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\' + } else { + if (process.platform === 'darwin') { + baseLocation = '/Users' + } else { + baseLocation = '/home' + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp') + } + + const dest = path.join(tempDirectory, uuidV4()) + await io.mkdirP(dest) + return dest +} + +export function getArchiveFileSize(filePath: string): number { + return fs.statSync(filePath).size +} + +export function logWarning(message: string): void { + const warningPrefix = '[warning]' + core.info(`${warningPrefix}${message}`) +} + +export async function resolvePaths(patterns: string[]): Promise { + const paths: string[] = [] + const workspace = process.env['GITHUB_WORKSPACE'] ?? 
process.cwd() + const globber = await glob.create(patterns.join('\n'), { + implicitDescendants: false + }) + + for await (const file of globber.globGenerator()) { + const relativeFile = path.relative(workspace, file) + core.debug(`Matched: ${relativeFile}`) + // Paths are made relative so the tar entries are all relative to the root of the workspace. + paths.push(`${relativeFile}`) + } + + return paths +} + +export async function unlinkFile(filePath: fs.PathLike): Promise { + return util.promisify(fs.unlink)(filePath) +} + +async function getVersion(app: string): Promise { + core.debug(`Checking ${app} --version`) + let versionOutput = '' + try { + await exec.exec(`${app} --version`, [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data: Buffer): string => (versionOutput += data.toString()), + stderr: (data: Buffer): string => (versionOutput += data.toString()) + } + }) + } catch (err) { + core.debug(err.message) + } + + versionOutput = versionOutput.trim() + core.debug(versionOutput) + return versionOutput +} + +export async function getCompressionMethod(): Promise { + const versionOutput = await getVersion('zstd') + return versionOutput.toLowerCase().includes('zstd command line interface') + ? CompressionMethod.Zstd + : CompressionMethod.Gzip +} + +export function getCacheFileName(compressionMethod: CompressionMethod): string { + return compressionMethod === CompressionMethod.Zstd + ? CacheFilename.Zstd + : CacheFilename.Gzip +} + +export async function useGnuTar(): Promise { + const versionOutput = await getVersion('tar') + return versionOutput.toLowerCase().includes('gnu tar') +} diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts new file mode 100644 index 00000000..b3d2a577 --- /dev/null +++ b/packages/cache/src/internal/constants.ts @@ -0,0 +1,14 @@ +export enum CacheFilename { + Gzip = 'cache.tgz', + Zstd = 'cache.tzst' +} + +export enum CompressionMethod { + Gzip = 'gzip', + Zstd = 'zstd' +} + +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. 
+export const SocketTimeout = 5000 diff --git a/packages/cache/src/internal/contracts.d.ts b/packages/cache/src/internal/contracts.d.ts new file mode 100644 index 00000000..ca3f3620 --- /dev/null +++ b/packages/cache/src/internal/contracts.d.ts @@ -0,0 +1,25 @@ +import {CompressionMethod} from './constants' + +export interface ArtifactCacheEntry { + cacheKey?: string + scope?: string + creationTime?: string + archiveLocation?: string +} + +export interface CommitCacheRequest { + size: number +} + +export interface ReserveCacheRequest { + key: string + version?: string +} + +export interface ReserveCacheResponse { + cacheId: number +} + +export interface CacheOptions { + compressionMethod?: CompressionMethod +} diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts new file mode 100644 index 00000000..221c7c70 --- /dev/null +++ b/packages/cache/src/internal/tar.ts @@ -0,0 +1,86 @@ +import {exec} from '@actions/exec' +import * as io from '@actions/io' +import {existsSync, writeFileSync} from 'fs' +import * as path from 'path' +import * as utils from './cacheUtils' +import {CompressionMethod} from './constants' + +async function getTarPath(args: string[]): Promise { + // Explicitly use BSD Tar on Windows + const IS_WINDOWS = process.platform === 'win32' + if (IS_WINDOWS) { + const systemTar = `${process.env['windir']}\\System32\\tar.exe` + if (existsSync(systemTar)) { + return systemTar + } else if (await utils.useGnuTar()) { + args.push('--force-local') + } + } + return await io.which('tar', true) +} + +async function execTar(args: string[], cwd?: string): Promise { + try { + await exec(`"${await getTarPath(args)}"`, args, {cwd}) + } catch (error) { + throw new Error(`Tar failed with error: ${error?.message}`) + } +} + +function getWorkingDirectory(): string { + return process.env['GITHUB_WORKSPACE'] ?? process.cwd() +} + +export async function extractTar( + archivePath: string, + compressionMethod: CompressionMethod +): Promise { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory() + await io.mkdirP(workingDirectory) + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const args = [ + ...(compressionMethod === CompressionMethod.Zstd + ? ['--use-compress-program', 'zstd -d --long=30'] + : ['-z']), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + await execTar(args) +} + +export async function createTar( + archiveFolder: string, + sourceDirectories: string[], + compressionMethod: CompressionMethod +): Promise { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = 'manifest.txt' + const cacheFileName = utils.getCacheFileName(compressionMethod) + writeFileSync( + path.join(archiveFolder, manifestFilename), + sourceDirectories.join('\n') + ) + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const workingDirectory = getWorkingDirectory() + const args = [ + ...(compressionMethod === CompressionMethod.Zstd + ? 
['--use-compress-program', 'zstd -T0 --long=30'] + : ['-z']), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ] + await execTar(args, archiveFolder) +} diff --git a/packages/cache/tsconfig.json b/packages/cache/tsconfig.json new file mode 100644 index 00000000..a8b812a6 --- /dev/null +++ b/packages/cache/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "baseUrl": "./", + "outDir": "./lib", + "rootDir": "./src" + }, + "include": [ + "./src" + ] +} \ No newline at end of file From 7409ad5faea73337d7cf3081e29b8e34f7ae0e44 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Wed, 6 May 2020 17:53:22 -0400 Subject: [PATCH 2/7] Change variable path to a list --- README.md | 2 +- packages/cache/README.md | 12 + .../cache/__tests__/cacheHttpClient.test.ts | 22 +- packages/cache/__tests__/cacheUtils.test.ts | 140 ---------- packages/cache/__tests__/restoreCache.test.ts | 81 +++--- packages/cache/__tests__/saveCache.test.ts | 102 +++---- packages/cache/src/cache.ts | 254 ++++++++---------- .../cache/src/internal/cacheHttpClient.ts | 12 +- packages/cache/src/internal/cacheUtils.ts | 5 - 9 files changed, 216 insertions(+), 414 deletions(-) diff --git a/README.md b/README.md index 8701ab5c..22fbc27c 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ $ npm install @actions/artifact --save Provides functions to interact with actions cache. Read more [here](packages/cache) ```bash -$ npm install @actions/artifact --save +$ npm install @actions/cache --save ```
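With this second patch switching `getCacheVersion` to accept a list of paths, the version hashes asserted in the test diff below are simply a sha256 over the joined components. A standalone re-derivation of that logic, assuming only Node's built-in `crypto` and mirroring `packages/cache/src/internal/cacheHttpClient.ts`:

```typescript
import * as crypto from 'crypto'

const versionSalt = '1.0'

function cacheVersion(paths: string[], useZstd: boolean): string {
  const components = [
    ...paths,
    // Only zstd contributes a component, so pre-existing gzip cache
    // versions are unchanged ("gzip compression does not change version").
    ...(useZstd ? ['zstd'] : []),
    versionSalt // the salt supports breaking changes in the entry format
  ]
  return crypto
    .createHash('sha256')
    .update(components.join('|'))
    .digest('hex')
}

// cacheVersion(['node_modules'], false) yields the hash asserted in
// cacheHttpClient.test.ts:
// 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
```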
diff --git a/packages/cache/README.md b/packages/cache/README.md index b65c7f34..402a7f50 100644 --- a/packages/cache/README.md +++ b/packages/cache/README.md @@ -1 +1,13 @@ # `@actions/cache` + +> Functions necessary for caching dependencies and build outputs to improve workflow execution time. + +## Usage + +#### Restore Cache + +#### Save Cache + +## Additional Documentation + +See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows). \ No newline at end of file diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts index d2165280..e5d7eacf 100644 --- a/packages/cache/__tests__/cacheHttpClient.test.ts +++ b/packages/cache/__tests__/cacheHttpClient.test.ts @@ -1,17 +1,25 @@ import {getCacheVersion} from '../src/internal/cacheHttpClient' import {CompressionMethod} from '../src/internal/constants' -test('getCacheVersion with path input and compression method undefined returns version', async () => { - const inputPath = 'node_modules' - const result = getCacheVersion(inputPath) +test('getCacheVersion with one path returns version', async () => { + const paths = ['node_modules'] + const result = getCacheVersion(paths) expect(result).toEqual( 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985' ) }) +test('getCacheVersion with multiple paths returns version', async () => { + const paths = ['node_modules', 'dist'] + const result = getCacheVersion(paths) + expect(result).toEqual( + '165c3053bc646bf0d4fac17b1f5731caca6fe38e0e464715c0c3c6b6318bf436' + ) +}) + test('getCacheVersion with zstd compression returns version', async () => { - const inputPath = 'node_modules' - const result = getCacheVersion(inputPath, CompressionMethod.Zstd) + const paths = ['node_modules'] + const result = getCacheVersion(paths, CompressionMethod.Zstd) expect(result).toEqual( '273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24' @@ -19,8 +27,8 @@ test('getCacheVersion with zstd compression returns version', async () => { }) test('getCacheVersion with gzip compression does not change vesion', async () => { - const inputPath = 'node_modules' - const result = getCacheVersion(inputPath, CompressionMethod.Gzip) + const paths = ['node_modules'] + const result = getCacheVersion(paths, CompressionMethod.Gzip) expect(result).toEqual( 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985' diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts index b09eed13..0a4b0f4c 100644 --- a/packages/cache/__tests__/cacheUtils.test.ts +++ b/packages/cache/__tests__/cacheUtils.test.ts @@ -1,9 +1,6 @@ -import * as core from '@actions/core' import * as io from '@actions/io' import {promises as fs} from 'fs' -import * as os from 'os' import * as path from 'path' -import {v4 as uuidV4} from 'uuid' import * as cacheUtils from '../src/internal/cacheUtils' jest.mock('@actions/core') @@ -26,143 +23,6 @@ test('getArchiveFileSize returns file size', () => { expect(size).toBe(11) }) -test('logWarning logs a message with a warning prefix', () => { - const message = 'A warning occurred.' 
- - const infoMock = jest.spyOn(core, 'info') - - cacheUtils.logWarning(message) - - expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`) -}) - -test('resolvePaths with no ~ in path', async () => { - const filePath = '.cache' - - // Create the following layout: - // cwd - // cwd/.cache - // cwd/.cache/file.txt - - const root = path.join(getTempDir(), 'no-tilde') - // tarball entries will be relative to workspace - process.env['GITHUB_WORKSPACE'] = root - - await fs.mkdir(root, {recursive: true}) - const cache = path.join(root, '.cache') - await fs.mkdir(cache, {recursive: true}) - await fs.writeFile(path.join(cache, 'file.txt'), 'cached') - - const originalCwd = process.cwd() - - try { - process.chdir(root) - - const resolvedPath = await cacheUtils.resolvePaths([filePath]) - - const expectedPath = [filePath] - expect(resolvedPath).toStrictEqual(expectedPath) - } finally { - process.chdir(originalCwd) - } -}) - -test('resolvePaths with ~ in path', async () => { - const cacheDir = uuidV4() - const filePath = `~/${cacheDir}` - // Create the following layout: - // ~/uuid - // ~/uuid/file.txt - - const homedir = jest.requireActual('os').homedir() - const homedirMock = jest.spyOn(os, 'homedir') - homedirMock.mockReturnValue(homedir) - - const target = path.join(homedir, cacheDir) - await fs.mkdir(target, {recursive: true}) - await fs.writeFile(path.join(target, 'file.txt'), 'cached') - - const root = getTempDir() - process.env['GITHUB_WORKSPACE'] = root - - try { - const resolvedPath = await cacheUtils.resolvePaths([filePath]) - - const expectedPath = [path.relative(root, target)] - expect(resolvedPath).toStrictEqual(expectedPath) - } finally { - await io.rmRF(target) - } -}) - -test('resolvePaths with home not found', async () => { - const filePath = '~/.cache/yarn' - const homedirMock = jest.spyOn(os, 'homedir') - homedirMock.mockReturnValue('') - - await expect(cacheUtils.resolvePaths([filePath])).rejects.toThrow( - 'Unable to determine HOME directory' - ) -}) - -test('resolvePaths inclusion pattern returns found', async () => { - const pattern = '*.ts' - // Create the following layout: - // inclusion-patterns - // inclusion-patterns/miss.txt - // inclusion-patterns/test.ts - - const root = path.join(getTempDir(), 'inclusion-patterns') - // tarball entries will be relative to workspace - process.env['GITHUB_WORKSPACE'] = root - - await fs.mkdir(root, {recursive: true}) - await fs.writeFile(path.join(root, 'miss.txt'), 'no match') - await fs.writeFile(path.join(root, 'test.ts'), 'match') - - const originalCwd = process.cwd() - - try { - process.chdir(root) - - const resolvedPath = await cacheUtils.resolvePaths([pattern]) - - const expectedPath = ['test.ts'] - expect(resolvedPath).toStrictEqual(expectedPath) - } finally { - process.chdir(originalCwd) - } -}) - -test('resolvePaths exclusion pattern returns not found', async () => { - const patterns = ['*.ts', '!test.ts'] - // Create the following layout: - // exclusion-patterns - // exclusion-patterns/miss.txt - // exclusion-patterns/test.ts - - const root = path.join(getTempDir(), 'exclusion-patterns') - // tarball entries will be relative to workspace - process.env['GITHUB_WORKSPACE'] = root - - await fs.mkdir(root, {recursive: true}) - await fs.writeFile(path.join(root, 'miss.txt'), 'no match') - await fs.writeFile(path.join(root, 'test.ts'), 'no match') - - const originalCwd = process.cwd() - - try { - process.chdir(root) - - const resolvedPath = await cacheUtils.resolvePaths(patterns) - - const expectedPath: string[] = [] - 
expect(resolvedPath).toStrictEqual(expectedPath) - } finally { - process.chdir(originalCwd) - } -}) - test('unlinkFile unlinks file', async () => { const testDirectory = await fs.mkdtemp('unlinkFileTest') const testFile = path.join(testDirectory, 'test.txt') diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts index d1f016d6..5c807fcf 100644 --- a/packages/cache/__tests__/restoreCache.test.ts +++ b/packages/cache/__tests__/restoreCache.test.ts @@ -20,112 +20,95 @@ beforeAll(() => { }) test('restore with no path should fail', async () => { - const inputPath = '' + const paths: string[] = [] const key = 'node-test' - const failedMock = jest.spyOn(core, 'setFailed') - await restoreCache(inputPath, key) - expect(failedMock).toHaveBeenCalledWith( - 'Input required and not supplied: path' + await expect(restoreCache(paths, key)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` ) }) test('restore with too many keys should fail', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' const restoreKeys = [...Array(20).keys()].map(x => x.toString()) - const failedMock = jest.spyOn(core, 'setFailed') - await restoreCache(inputPath, key, restoreKeys) - expect(failedMock).toHaveBeenCalledWith( + await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError( `Key Validation Error: Keys are limited to a maximum of 10.` ) }) test('restore with large key should fail', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'foo'.repeat(512) // Over the 512 character limit - const failedMock = jest.spyOn(core, 'setFailed') - await restoreCache(inputPath, key) - expect(failedMock).toHaveBeenCalledWith( + await expect(restoreCache(paths, key)).rejects.toThrowError( `Key Validation Error: ${key} cannot be larger than 512 characters.` ) }) test('restore with invalid key should fail', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'comma,comma' - const failedMock = jest.spyOn(core, 'setFailed') - await restoreCache(inputPath, key) - expect(failedMock).toHaveBeenCalledWith( + await expect(restoreCache(paths, key)).rejects.toThrowError( `Key Validation Error: ${key} cannot contain commas.` ) }) test('restore with no cache found', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' const infoMock = jest.spyOn(core, 'info') - const failedMock = jest.spyOn(core, 'setFailed') - const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') clientMock.mockImplementation(async () => { return Promise.resolve(null) }) - await restoreCache(inputPath, key) + const cacheKey = await restoreCache(paths, key) - expect(failedMock).toHaveBeenCalledTimes(0) + expect(cacheKey).toBe(undefined) expect(infoMock).toHaveBeenCalledWith( `Cache not found for input keys: ${key}` ) }) test('restore with server error should fail', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' - const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') - const failedMock = jest.spyOn(core, 'setFailed') - const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') clientMock.mockImplementation(() => { throw new Error('HTTP Error Occurred') }) - await restoreCache(inputPath, key) - - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred') 
- expect(failedMock).toHaveBeenCalledTimes(0) + await expect(restoreCache(paths, key)).rejects.toThrowError( + 'HTTP Error Occurred' + ) }) test('restore with restore keys and no cache found', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' const restoreKey = 'node-' const infoMock = jest.spyOn(core, 'info') - const failedMock = jest.spyOn(core, 'setFailed') const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') clientMock.mockImplementation(async () => { return Promise.resolve(null) }) - await restoreCache(inputPath, key, [restoreKey]) + const cacheKey = await restoreCache(paths, key, [restoreKey]) - expect(failedMock).toHaveBeenCalledTimes(0) + expect(cacheKey).toBe(undefined) expect(infoMock).toHaveBeenCalledWith( `Cache not found for input keys: ${key}, ${restoreKey}` ) }) test('restore with gzip compressed cache found', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' const infoMock = jest.spyOn(core, 'info') - const failedMock = jest.spyOn(core, 'setFailed') const cacheEntry: ArtifactCacheEntry = { cacheKey: key, @@ -160,9 +143,10 @@ test('restore with gzip compressed cache found', async () => { .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValue(Promise.resolve(compression)) - await restoreCache(inputPath, key) + const cacheKey = await restoreCache(paths, key) - expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, { + expect(cacheKey).toBe(key) + expect(getCacheMock).toHaveBeenCalledWith([key], paths, { compressionMethod: compression }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) @@ -179,16 +163,14 @@ test('restore with gzip compressed cache found', async () => { expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`) - expect(failedMock).toHaveBeenCalledTimes(0) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) test('restore with a pull request event and zstd compressed cache found', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' const infoMock = jest.spyOn(core, 'info') - const failedMock = jest.spyOn(core, 'setFailed') const cacheEntry: ArtifactCacheEntry = { cacheKey: key, @@ -220,9 +202,10 @@ test('restore with a pull request event and zstd compressed cache found', async .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValue(Promise.resolve(compression)) - await restoreCache(inputPath, key) + const cacheKey = await restoreCache(paths, key) - expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, { + expect(cacheKey).toBe(key) + expect(getCacheMock).toHaveBeenCalledWith([key], paths, { compressionMethod: compression }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) @@ -237,17 +220,15 @@ test('restore with a pull request event and zstd compressed cache found', async expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`) - expect(failedMock).toHaveBeenCalledTimes(0) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) test('restore with cache found for restore key', async () => { - const inputPath = 'node_modules' + const paths = ['node_modules'] const key = 'node-test' const restoreKey = 'node-' const infoMock = jest.spyOn(core, 'info') - const failedMock = jest.spyOn(core, 'setFailed') const cacheEntry: ArtifactCacheEntry = { cacheKey: restoreKey, @@ -279,9 +260,10 @@ test('restore with 
cache found for restore key', async () => { .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValue(Promise.resolve(compression)) - await restoreCache(inputPath, key, [restoreKey]) + const cacheKey = await restoreCache(paths, key, [restoreKey]) - expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], inputPath, { + expect(cacheKey).toBe(restoreKey) + expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, { compressionMethod: compression }) expect(createTempDirectoryMock).toHaveBeenCalledTimes(1) @@ -298,6 +280,5 @@ test('restore with cache found for restore key', async () => { expect(infoMock).toHaveBeenCalledWith( `Cache restored from key: ${restoreKey}` ) - expect(failedMock).toHaveBeenCalledTimes(0) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index f1346634..2339a197 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -1,4 +1,3 @@ -import * as core from '@actions/core' import * as path from 'path' import {saveCache} from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' @@ -27,42 +26,31 @@ beforeAll(() => { }) }) -test('save with missing input outputs warning', async () => { - const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') - const failedMock = jest.spyOn(core, 'setFailed') - - const inputPath = '' +test('save with missing input should fail', async () => { + const paths: string[] = [] const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - - await saveCache(inputPath, primaryKey) - - expect(logWarningMock).toHaveBeenCalledWith( - 'Input required and not supplied: path' + await expect(saveCache(paths, primaryKey)).rejects.toThrowError( + `Path Validation Error: At least one directory or file path is required` ) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(failedMock).toHaveBeenCalledTimes(0) }) -test('save with large cache outputs warning', async () => { - const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') - const failedMock = jest.spyOn(core, 'setFailed') - - const inputPath = 'node_modules' +test('save with large cache outputs should fail', async () => { + const filePath = 'node_modules' const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const cachePaths = [path.resolve(inputPath)] + const cachePaths = [path.resolve(filePath)] const createTarMock = jest.spyOn(tar, 'createTar') const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit - jest.spyOn(cacheUtils, 'getArchiveFileSize').mockImplementationOnce(() => { - return cacheSize - }) + jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValue(cacheSize) const compression = CompressionMethod.Gzip const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) + .mockReturnValueOnce(Promise.resolve(compression)) - await saveCache(inputPath, primaryKey) + await expect(saveCache([filePath], primaryKey)).rejects.toThrowError( + 'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.' + ) const archiveFolder = '/foo/bar' @@ -72,20 +60,11 @@ test('save with large cache outputs warning', async () => { cachePaths, compression ) - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith( - 'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.' 
- ) - expect(failedMock).toHaveBeenCalledTimes(0) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) -test('save with reserve cache failure outputs warning', async () => { - const infoMock = jest.spyOn(core, 'info') - const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') - const failedMock = jest.spyOn(core, 'setFailed') - - const inputPath = 'node_modules' +test('save with reserve cache failure should fail', async () => { + const paths = ['node_modules'] const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' const reserveCacheMock = jest @@ -99,33 +78,24 @@ test('save with reserve cache failure outputs warning', async () => { const compression = CompressionMethod.Zstd const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) + .mockReturnValueOnce(Promise.resolve(compression)) - await saveCache(inputPath, primaryKey) - - expect(reserveCacheMock).toHaveBeenCalledTimes(1) - expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, { - compressionMethod: compression - }) - - expect(infoMock).toHaveBeenCalledWith( + await expect(saveCache(paths, primaryKey)).rejects.toThrowError( `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` ) - + expect(reserveCacheMock).toHaveBeenCalledTimes(1) + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, { + compressionMethod: compression + }) expect(createTarMock).toHaveBeenCalledTimes(0) expect(saveCacheMock).toHaveBeenCalledTimes(0) - expect(logWarningMock).toHaveBeenCalledTimes(0) - expect(failedMock).toHaveBeenCalledTimes(0) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) -test('save with server error outputs warning', async () => { - const logWarningMock = jest.spyOn(cacheUtils, 'logWarning') - const failedMock = jest.spyOn(core, 'setFailed') - - const inputPath = 'node_modules' +test('save with server error should fail', async () => { + const filePath = 'node_modules' const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43' - const cachePaths = [path.resolve(inputPath)] + const cachePaths = [path.resolve(filePath)] const cacheId = 4 const reserveCacheMock = jest @@ -144,12 +114,13 @@ test('save with server error outputs warning', async () => { const compression = CompressionMethod.Zstd const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') - .mockReturnValue(Promise.resolve(compression)) - - await saveCache(inputPath, primaryKey) + .mockReturnValueOnce(Promise.resolve(compression)) + await expect(await saveCache([filePath], primaryKey)).rejects.toThrowError( + 'HTTP Error Occurred' + ) expect(reserveCacheMock).toHaveBeenCalledTimes(1) - expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, { + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], { compressionMethod: compression }) @@ -165,20 +136,13 @@ test('save with server error outputs warning', async () => { expect(saveCacheMock).toHaveBeenCalledTimes(1) expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile) - - expect(logWarningMock).toHaveBeenCalledTimes(1) - expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred') - - expect(failedMock).toHaveBeenCalledTimes(0) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) test('save with valid inputs uploads a cache', async () => { - const failedMock = jest.spyOn(core, 'setFailed') - - const inputPath = 'node_modules' + const filePath = 'node_modules' const primaryKey = 
'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const cachePaths = [path.resolve(inputPath)]
+  const cachePaths = [path.resolve(filePath)]
   const cacheId = 4
 
   const reserveCacheMock = jest
@@ -194,10 +158,10 @@ test('save with valid inputs uploads a cache', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValue(Promise.resolve(compression))
 
-  await saveCache(inputPath, primaryKey)
+  await saveCache([filePath], primaryKey)
 
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
-  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
     compressionMethod: compression
   })
 
@@ -213,7 +177,5 @@ test('save with valid inputs uploads a cache', async () => {
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
   expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
-
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 4aab741e..cf9be5eb 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -1,169 +1,153 @@
 import * as core from '@actions/core'
-import * as pathUtils from 'path'
+import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar} from './internal/tar'
 
+function checkPaths(paths: string[]): void {
+  if (!paths || paths.length === 0) {
+    throw new Error(
+      `Path Validation Error: At least one directory or file path is required`
+    )
+  }
+}
+
+function checkKey(key: string): void {
+  if (key.length > 512) {
+    throw new Error(
+      `Key Validation Error: ${key} cannot be larger than 512 characters.`
+    )
+  }
+  const regex = /^[^,]*$/
+  if (!regex.test(key)) {
+    throw new Error(`Key Validation Error: ${key} cannot contain commas.`)
+  }
+}
+
 /**
  * Restores cache from keys
  *
- * @param path a string representing files that were cached
+ * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
  * @returns string returns the key for the cache hit, otherwise return undefined
  */
 export async function restoreCache(
-  path: string,
+  paths: string[],
   primaryKey: string,
   restoreKeys?: string[]
 ): Promise<string | undefined> {
-  try {
-    if (!path || path.length === 0) {
-      throw new Error('Input required and not supplied: path')
-    }
+  checkPaths(paths)
 
-    restoreKeys = restoreKeys || []
-    const keys = [primaryKey, ...restoreKeys]
+  restoreKeys = restoreKeys || []
+  const keys = [primaryKey, ...restoreKeys]
 
-    core.debug('Resolved Keys:')
-    core.debug(JSON.stringify(keys))
+  core.debug('Resolved Keys:')
+  core.debug(JSON.stringify(keys))
 
-    if (keys.length > 10) {
-      core.setFailed(
-        `Key Validation Error: Keys are limited to a maximum of 10.`
-      )
-      return undefined
-    }
-    for (const key of keys) {
-      if (key.length > 512) {
-        core.setFailed(
-          `Key Validation Error: ${key} cannot be larger than 512 characters.`
-        )
-        return undefined
-      }
-      const regex = /^[^,]*$/
-      if (!regex.test(key)) {
-        core.setFailed(`Key Validation Error: ${key} cannot contain commas.`)
-        return undefined
-      }
-    }
+  if (keys.length > 10) {
+    throw new Error(
+      `Key Validation Error: Keys are limited to a maximum of 10.`
+    )
+  }
+  for (const key of keys) {
+    checkKey(key)
+  }
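Editorial aside, not part of the patch: `checkPaths`/`checkKey` above replace the old `core.setFailed` calls with thrown errors, so validation failures now surface as rejected promises. A sketch of the behavior — the helpers are module-internal, so the direct calls below are illustrative only:

```ts
// Sketch: how the validation above behaves (inputs are made up).
checkPaths(['node_modules'])   // passes
checkPaths([])                 // throws: Path Validation Error: At least one directory or file path is required
checkKey('npm-linux-d5ea0750') // passes
checkKey('bad,key')            // throws: Key Validation Error: bad,key cannot contain commas.
checkKey('k'.repeat(513))      // throws: Key Validation Error: ... cannot be larger than 512 characters.
```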
-    const compressionMethod = await utils.getCompressionMethod()
+  const compressionMethod = await utils.getCompressionMethod()
 
-    try {
-      // path are needed to compute version
-      const cacheEntry = await cacheHttpClient.getCacheEntry(keys, path, {
-        compressionMethod
-      })
-      if (!cacheEntry?.archiveLocation) {
-        core.info(`Cache not found for input keys: ${keys.join(', ')}`)
-        return undefined
-      }
-
-      const archivePath = pathUtils.join(
-        await utils.createTempDirectory(),
-        utils.getCacheFileName(compressionMethod)
-      )
-      core.debug(`Archive Path: ${archivePath}`)
-
-      try {
-        // Download the cache from the cache entry
-        await cacheHttpClient.downloadCache(
-          cacheEntry.archiveLocation,
-          archivePath
-        )
-
-        const archiveFileSize = utils.getArchiveFileSize(archivePath)
-        core.info(
-          `Cache Size: ~${Math.round(
-            archiveFileSize / (1024 * 1024)
-          )} MB (${archiveFileSize} B)`
-        )
-
-        await extractTar(archivePath, compressionMethod)
-      } finally {
-        // Try to delete the archive to save space
-        try {
-          await utils.unlinkFile(archivePath)
-        } catch (error) {
-          core.debug(`Failed to delete archive: ${error}`)
-        }
-      }
-
-      core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
-
-      return cacheEntry.cacheKey
-    } catch (error) {
-      utils.logWarning(error.message)
-      return undefined
-    }
-  } catch (error) {
-    core.setFailed(error.message)
+  // paths are needed to compute version
+  const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+    compressionMethod
+  })
+  if (!cacheEntry?.archiveLocation) {
+    core.info(`Cache not found for input keys: ${keys.join(', ')}`)
     return undefined
   }
+
+  const archivePath = path.join(
+    await utils.createTempDirectory(),
+    utils.getCacheFileName(compressionMethod)
+  )
+  core.debug(`Archive Path: ${archivePath}`)
+
+  try {
+    // Download the cache from the cache entry
+    await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)
+
+    const archiveFileSize = utils.getArchiveFileSize(archivePath)
+    core.info(
+      `Cache Size: ~${Math.round(
+        archiveFileSize / (1024 * 1024)
+      )} MB (${archiveFileSize} B)`
+    )
+
+    await extractTar(archivePath, compressionMethod)
+  } finally {
+    // Try to delete the archive to save space
+    try {
+      await utils.unlinkFile(archivePath)
+    } catch (error) {
+      core.debug(`Failed to delete archive: ${error}`)
+    }
+  }
+
+  core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
+
+  return cacheEntry.cacheKey
 }
 
 /**
- * Saves a file with the specified key
+ * Saves a list of files with the specified key
  *
- * @param path a string representing files to be cached
+ * @param paths a list of file paths to be cached
  * @param key an explicit key for restoring the cache
- * @returns number returns cacheId if the cache was saved successfully, otherwise return -1
+ * @returns number returns cacheId if the cache was saved successfully
  */
-export async function saveCache(path: string, key: string): Promise<number> {
-  try {
-    if (!path || path.length === 0) {
-      throw new Error('Input required and not supplied: path')
-    }
+export async function saveCache(paths: string[], key: string): Promise<number> {
+  checkPaths(paths)
+  checkKey(key)
 
-    const compressionMethod = await utils.getCompressionMethod()
+  const compressionMethod = await utils.getCompressionMethod()
 
-    core.debug('Reserving Cache')
-    const cacheId = await cacheHttpClient.reserveCache(key, path, {
-      compressionMethod
-    })
-    if (cacheId === -1) {
-      core.info(
-        `Unable to reserve cache with key ${key}, another job may be creating this cache.`
-      )
-      return -1
-    }
-    core.debug(`Cache ID: 
${cacheId}`) - const cachePaths = await utils.resolvePaths( - path.split('\n').filter(x => x !== '') + core.debug('Reserving Cache') + const cacheId = await cacheHttpClient.reserveCache(key, paths, { + compressionMethod + }) + if (cacheId === -1) { + throw new Error( + `Unable to reserve cache with key ${key}, another job may be creating this cache.` ) - - core.debug('Cache Paths:') - core.debug(`${JSON.stringify(cachePaths)}`) - - const archiveFolder = await utils.createTempDirectory() - const archivePath = pathUtils.join( - archiveFolder, - utils.getCacheFileName(compressionMethod) - ) - - core.debug(`Archive Path: ${archivePath}`) - - await createTar(archiveFolder, cachePaths, compressionMethod) - - const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit - const archiveFileSize = utils.getArchiveFileSize(archivePath) - core.debug(`File Size: ${archiveFileSize}`) - if (archiveFileSize > fileSizeLimit) { - utils.logWarning( - `Cache size of ~${Math.round( - archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.` - ) - return -1 - } - - core.debug(`Saving Cache (ID: ${cacheId})`) - await cacheHttpClient.saveCache(cacheId, archivePath) - - return cacheId - } catch (error) { - utils.logWarning(error.message) - return -1 } + core.debug(`Cache ID: ${cacheId}`) + const cachePaths = await utils.resolvePaths(paths) + + core.debug('Cache Paths:') + core.debug(`${JSON.stringify(cachePaths)}`) + + const archiveFolder = await utils.createTempDirectory() + const archivePath = path.join( + archiveFolder, + utils.getCacheFileName(compressionMethod) + ) + + core.debug(`Archive Path: ${archivePath}`) + + await createTar(archiveFolder, cachePaths, compressionMethod) + + const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit + const archiveFileSize = utils.getArchiveFileSize(archivePath) + core.debug(`File Size: ${archiveFileSize}`) + if (archiveFileSize > fileSizeLimit) { + throw new Error( + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.` + ) + } + + core.debug(`Saving Cache (ID: ${cacheId})`) + await cacheHttpClient.saveCache(cacheId, archivePath) + + return cacheId } diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index 92e9498a..f4f8c4d1 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -84,10 +84,10 @@ function createHttpClient(): HttpClient { } export function getCacheVersion( - inputPath: string, + paths: string[], compressionMethod?: CompressionMethod ): string { - const components = [inputPath].concat( + const components = paths.concat( compressionMethod === CompressionMethod.Zstd ? 
[compressionMethod] : []
   )
 
@@ -102,11 +102,11 @@ export function getCacheVersion(
 
 export async function getCacheEntry(
   keys: string[],
-  inputPath: string,
+  paths: string[],
   options?: CacheOptions
 ): Promise<ArtifactCacheEntry | null> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(inputPath, options?.compressionMethod)
+  const version = getCacheVersion(paths, options?.compressionMethod)
   const resource = `cache?keys=${encodeURIComponent(
     keys.join(',')
   )}&version=${version}`
@@ -177,11 +177,11 @@ export async function downloadCache(
 
 // Reserve Cache
 export async function reserveCache(
   key: string,
-  inputPath: string,
+  paths: string[],
   options?: CacheOptions
 ): Promise<number> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(inputPath, options?.compressionMethod)
+  const version = getCacheVersion(paths, options?.compressionMethod)
 
   const reserveCacheRequest: ReserveCacheRequest = {
     key,
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
index 8cce071f..8743963a 100644
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -38,11 +38,6 @@ export function getArchiveFileSize(filePath: string): number {
   return fs.statSync(filePath).size
 }
 
-export function logWarning(message: string): void {
-  const warningPrefix = '[warning]'
-  core.info(`${warningPrefix}${message}`)
-}
-
 export async function resolvePaths(patterns: string[]): Promise<string[]> {
   const paths: string[] = []
   const workspace = process.env['GITHUB_WORKSPACE'] ?? process.cwd()

From 15fefd93366e98ede1b0739e6e461e85cfbb04c4 Mon Sep 17 00:00:00 2001
From: Aiqiao Yan
Date: Wed, 6 May 2020 20:07:39 -0400
Subject: [PATCH 3/7] Fix tests

---
 README.md                                     | 2 +-
 packages/cache/RELEASES.md                    | 2 +-
 packages/cache/__tests__/restoreCache.test.ts | 9 +++------
 packages/cache/__tests__/saveCache.test.ts    | 6 +++---
 packages/cache/package-lock.json              | 2 +-
 packages/cache/package.json                   | 4 ++--
 6 files changed, 11 insertions(+), 14 deletions(-)

diff --git a/README.md b/README.md
index 22fbc27c..df36e70b 100644
--- a/README.md
+++ b/README.md
@@ -84,7 +84,7 @@ $ npm install @actions/artifact --save
 
 :dart: [@actions/cache](packages/cache)
 
-Provides functions to interact with actions cache. Read more [here](packages/cache)
+Provides functions to cache dependencies and build outputs to improve workflow execution time.. 
Read more [here](packages/cache) ```bash $ npm install @actions/cache --save diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index b47fc550..1250c050 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -1,5 +1,5 @@ # @actions/cache Releases -### 0.0.0 +### 1.0.0 - Initial release \ No newline at end of file diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts index 5c807fcf..b466b9af 100644 --- a/packages/cache/__tests__/restoreCache.test.ts +++ b/packages/cache/__tests__/restoreCache.test.ts @@ -57,8 +57,7 @@ test('restore with no cache found', async () => { const key = 'node-test' const infoMock = jest.spyOn(core, 'info') - const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - clientMock.mockImplementation(async () => { + jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { return Promise.resolve(null) }) @@ -74,8 +73,7 @@ test('restore with server error should fail', async () => { const paths = ['node_modules'] const key = 'node-test' - const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - clientMock.mockImplementation(() => { + jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => { throw new Error('HTTP Error Occurred') }) @@ -91,8 +89,7 @@ test('restore with restore keys and no cache found', async () => { const infoMock = jest.spyOn(core, 'info') - const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry') - clientMock.mockImplementation(async () => { + jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { return Promise.resolve(null) }) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 2339a197..2fc379b3 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -42,7 +42,7 @@ test('save with large cache outputs should fail', async () => { const createTarMock = jest.spyOn(tar, 'createTar') const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit - jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValue(cacheSize) + jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValueOnce(cacheSize) const compression = CompressionMethod.Gzip const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') @@ -108,7 +108,7 @@ test('save with server error should fail', async () => { const saveCacheMock = jest .spyOn(cacheHttpClient, 'saveCache') - .mockImplementationOnce(async () => { + .mockImplementationOnce(() => { throw new Error('HTTP Error Occurred') }) const compression = CompressionMethod.Zstd @@ -116,7 +116,7 @@ test('save with server error should fail', async () => { .spyOn(cacheUtils, 'getCompressionMethod') .mockReturnValueOnce(Promise.resolve(compression)) - await expect(await saveCache([filePath], primaryKey)).rejects.toThrowError( + await expect(saveCache([filePath], primaryKey)).rejects.toThrowError( 'HTTP Error Occurred' ) expect(reserveCacheMock).toHaveBeenCalledTimes(1) diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 4a263beb..569bab49 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "0.0.0", + "version": "1.0.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/packages/cache/package.json b/packages/cache/package.json index 1454048c..2ee2d9bb 100644 --- a/packages/cache/package.json +++ 
b/packages/cache/package.json @@ -1,8 +1,8 @@ { "name": "@actions/cache", - "version": "0.0.0", + "version": "1.0.0", "preview": true, - "description": "Actions artifact cache lib", + "description": "Actions cache lib", "keywords": [ "github", "actions", From c534ad2cbd9ce38d33cdeaee134869afd430876d Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Thu, 7 May 2020 15:03:20 -0400 Subject: [PATCH 4/7] Add docs and tests --- .github/workflows/cache-tests.yml | 61 +++++++++++++++++++ README.md | 4 +- packages/cache/CONTRIBUTIONS.md | 0 packages/cache/README.md | 28 +++++++++ .../cache/__tests__/__fixtures__/action.yml | 5 ++ .../cache/__tests__/__fixtures__/index.js | 5 ++ packages/cache/package-lock.json | 54 ++++++++++++++++ scripts/create-cache-files.sh | 17 ++++++ scripts/verify-cache-files.sh | 36 +++++++++++ 9 files changed, 209 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/cache-tests.yml delete mode 100644 packages/cache/CONTRIBUTIONS.md create mode 100644 packages/cache/__tests__/__fixtures__/action.yml create mode 100644 packages/cache/__tests__/__fixtures__/index.js create mode 100755 scripts/create-cache-files.sh create mode 100755 scripts/verify-cache-files.sh diff --git a/.github/workflows/cache-tests.yml b/.github/workflows/cache-tests.yml new file mode 100644 index 00000000..aa382ffc --- /dev/null +++ b/.github/workflows/cache-tests.yml @@ -0,0 +1,61 @@ +name: cache-unit-tests +on: push + +jobs: + build: + name: Build + + strategy: + matrix: + runs-on: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + + runs-on: ${{ matrix.runs-on }} + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Set Node.js 12.x + uses: actions/setup-node@v1 + with: + node-version: 12.x + + # In order to save & restore cache artifacts from a shell script, certain env variables need to be set that are only available in the + # node context. 
This runs a local action that gets and sets the necessary env variables that are needed
+      - name: Set env variables
+        uses: ./packages/cache/__tests__/__fixtures__/
+
+      # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
+      # without these to just compile the cache package
+      - name: Install root npm packages
+        run: npm ci
+
+      - name: Compile cache package
+        run: |
+          npm ci
+          npm run tsc
+        working-directory: packages/cache
+
+      - name: Generate files in working directory
+        shell: bash
+        run: scripts/create-cache-files.sh ${{ runner.os }} test-cache
+
+      - name: Generate files outside working directory
+        shell: bash
+        run: scripts/create-cache-files.sh ${{ runner.os }} ~/test-cache
+
+      # We're using node -e to call the functions directly available in the @actions/cache package
+      - name: Save cache using saveCache()
+        run: |
+          node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
+
+      - name: Restore cache using restoreCache()
+        run: |
+          node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
+
+      - name: Verify cache
+        shell: bash
+        run: |
+          scripts/verify-cache-files.sh ${{ runner.os }} test-cache
+          scripts/verify-cache-files.sh ${{ runner.os }} ~/test-cache
\ No newline at end of file
diff --git a/README.md b/README.md
index df36e70b..4ffb0d44 100644
--- a/README.md
+++ b/README.md
@@ -59,6 +59,8 @@ $ npm install @actions/io --save
 
 Provides functions for downloading and caching tools. e.g. setup-* actions. Read more [here](packages/tool-cache)
 
+See @actions/cache for caching workflow dependencies.
+
 ```bash
 $ npm install @actions/tool-cache --save
 ```
@@ -84,7 +86,7 @@ $ npm install @actions/artifact --save
 
 :dart: [@actions/cache](packages/cache)
 
-Provides functions to cache dependencies and build outputs to improve workflow execution time.. Read more [here](packages/cache)
+Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)
 
 ```bash
 $ npm install @actions/cache --save
diff --git a/packages/cache/CONTRIBUTIONS.md b/packages/cache/CONTRIBUTIONS.md
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/cache/README.md b/packages/cache/README.md
index 402a7f50..f15d0b16 100644
--- a/packages/cache/README.md
+++ b/packages/cache/README.md
@@ -6,8 +6,36 @@
 
 #### Restore Cache
 
+Restores a cache based on `key` and `restoreKeys` to the `paths` provided. The function returns the cache key on a cache hit, and undefined if no cache is found.
+
+```js
+const cache = require('@actions/cache');
+const paths = [
+    'node_modules',
+    'packages/*/node_modules/'
+]
+const key = 'npm-foobar-d5ea0750'
+const restoreKeys = [
+    'npm-foobar-',
+    'npm-'
+]
+const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
+```
+
 #### Save Cache
 
+Saves a cache containing the files in `paths` using the `key` provided. The function returns the cache id if the cache was saved successfully.
+ +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const cacheId = await cache.saveCache(paths, key) +``` + ## Additional Documentation See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows). \ No newline at end of file diff --git a/packages/cache/__tests__/__fixtures__/action.yml b/packages/cache/__tests__/__fixtures__/action.yml new file mode 100644 index 00000000..7cd98502 --- /dev/null +++ b/packages/cache/__tests__/__fixtures__/action.yml @@ -0,0 +1,5 @@ +name: 'Set env variables' +description: 'Sets certain env variables so that e2e restore and save cache can be tested in a shell' +runs: + using: 'node12' + main: 'index.js' \ No newline at end of file diff --git a/packages/cache/__tests__/__fixtures__/index.js b/packages/cache/__tests__/__fixtures__/index.js new file mode 100644 index 00000000..82bc6c43 --- /dev/null +++ b/packages/cache/__tests__/__fixtures__/index.js @@ -0,0 +1,5 @@ +// Certain env variables are not set by default in a shell context and are only available in a node context from a running action +// In order to be able to restore and save cache e2e in a shell when running CI tests, we need these env variables set +console.log(`::set-env name=ACTIONS_RUNTIME_URL::${process.env.ACTIONS_RUNTIME_URL}`) +console.log(`::set-env name=ACTIONS_RUNTIME_TOKEN::${process.env.ACTIONS_RUNTIME_TOKEN}`) +console.log(`::set-env name=GITHUB_RUN_ID::${process.env.GITHUB_RUN_ID}`) \ No newline at end of file diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 569bab49..4e2de82a 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -4,6 +4,28 @@ "lockfileVersion": 1, "requires": true, "dependencies": { + "@actions/core": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.4.tgz", + "integrity": "sha512-YJCEq8BE3CdN8+7HPZ/4DxJjk/OkZV2FFIf+DlZTC/4iBlzYCD5yjRR6eiOS5llO11zbRltIRuKAjMKaWTE6cg==" + }, + "@actions/exec": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.4.tgz", + "integrity": "sha512-4DPChWow9yc9W3WqEbUj8Nr86xkpyE29ZzWjXucHItclLbEW6jr80Zx4nqv18QL6KK65+cifiQZXvnqgTV6oHw==", + "requires": { + "@actions/io": "^1.0.1" + } + }, + "@actions/glob": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.0.tgz", + "integrity": "sha512-lx8SzyQ2FE9+UUvjqY1f28QbTJv+w8qP7kHHbfQRhphrlcx0Mdmm1tZdGJzfxv1jxREa/sLW4Oy8CbGQKCJySA==", + "requires": { + "@actions/core": "^1.2.0", + "minimatch": "^3.0.4" + } + }, "@actions/http-client": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz", @@ -12,12 +34,44 @@ "tunnel": "0.0.6" } }, + "@actions/io": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz", + "integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg==" + }, "@types/uuid": { "version": "3.4.9", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.9.tgz", "integrity": "sha512-XDwyIlt/47l2kWLTzw/mtrpLdB+GPSskR2n/PIcPn+VYhVO77rGhRncIR5GPU0KRzXuqkDO+J5qqrG0Y8P6jzQ==", "dev": true }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": 
"sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, "tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", diff --git a/scripts/create-cache-files.sh b/scripts/create-cache-files.sh new file mode 100755 index 00000000..0ce4140f --- /dev/null +++ b/scripts/create-cache-files.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +path="$2" +if [ -z "$path" ]; then + echo "Must supply path argument" + exit 1 +fi + +mkdir -p $path +echo "$prefix $GITHUB_RUN_ID" > $path/test-file.txt diff --git a/scripts/verify-cache-files.sh b/scripts/verify-cache-files.sh new file mode 100755 index 00000000..3ee8a842 --- /dev/null +++ b/scripts/verify-cache-files.sh @@ -0,0 +1,36 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +path="$2" +if [ -z "$path" ]; then + echo "Must specify path argument" + exit 1 +fi + +# Sanity check GITHUB_RUN_ID defined +if [ -z "$GITHUB_RUN_ID" ]; then + echo "GITHUB_RUN_ID not defined" + exit 1 +fi + +# Verify file exists +file="$path/test-file.txt" +echo "Checking for $file" +if [ ! 
-e $file ]; then + echo "File does not exist" + exit 1 +fi + +# Verify file content +content="$(cat $file)" +echo "File content:\n$content" +if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then + echo "Unexpected file content" + exit 1 +fi From 1413cd0e32562e29ebe9aa04d4b7176120890791 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Tue, 12 May 2020 12:37:03 -0400 Subject: [PATCH 5/7] Add cache upload options and pull from latest actions/cache master --- .github/workflows/cache-tests.yml | 12 +- README.md | 2 +- .../cache/__tests__/cacheHttpClient.test.ts | 141 ++++++++++++- packages/cache/__tests__/saveCache.test.ts | 4 +- packages/cache/src/cache.ts | 10 +- .../cache/src/internal/cacheHttpClient.ts | 190 ++++++++++++------ packages/cache/src/options.ts | 17 ++ 7 files changed, 309 insertions(+), 67 deletions(-) create mode 100644 packages/cache/src/options.ts diff --git a/.github/workflows/cache-tests.yml b/.github/workflows/cache-tests.yml index aa382ffc..e63ac90c 100644 --- a/.github/workflows/cache-tests.yml +++ b/.github/workflows/cache-tests.yml @@ -1,5 +1,13 @@ name: cache-unit-tests -on: push +on: + push: + branches: + - master + paths-ignore: + - '**.md' + pull_request: + paths-ignore: + - '**.md' jobs: build: @@ -21,7 +29,7 @@ jobs: with: node-version: 12.x - # In order to save & restore cache artifacts from a shell script, certain env variables need to be set that are only available in the + # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the # node context. This runs a local action that gets and sets the necessary env variables that are needed - name: Set env variables uses: ./packages/cache/__tests__/__fixtures__/ diff --git a/README.md b/README.md index 4ffb0d44..6fd01f8c 100644 --- a/README.md +++ b/README.md @@ -89,7 +89,7 @@ $ npm install @actions/artifact --save Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache) ```bash -$ npm install @actions/cache --save +$ npm install @actions/cache ```
diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts
index e5d7eacf..a7f3fec1 100644
--- a/packages/cache/__tests__/cacheHttpClient.test.ts
+++ b/packages/cache/__tests__/cacheHttpClient.test.ts
@@ -1,4 +1,4 @@
-import {getCacheVersion} from '../src/internal/cacheHttpClient'
+import {getCacheVersion, retry} from '../src/internal/cacheHttpClient'
 import {CompressionMethod} from '../src/internal/constants'
 
@@ -34,3 +34,142 @@ test('getCacheVersion with gzip compression does not change vesion', async () =>
     'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
   )
 })
+
+interface TestResponse {
+  statusCode: number
+  result: string | null
+}
+
+async function handleResponse(
+  response: TestResponse | undefined
+): Promise<TestResponse> {
+  if (!response) {
+    // eslint-disable-next-line no-undef
+    fail('Retry method called too many times')
+  }
+
+  if (response.statusCode === 999) {
+    throw Error('Test Error')
+  } else {
+    return Promise.resolve(response)
+  }
+}
+
+async function testRetryExpectingResult(
+  responses: TestResponse[],
+  expectedResult: string | null
+): Promise<void> {
+  responses = responses.reverse() // Reverse responses since we pop from end
+
+  const actualResult = await retry(
+    'test',
+    async () => handleResponse(responses.pop()),
+    (response: TestResponse) => response.statusCode
+  )
+
+  expect(actualResult.result).toEqual(expectedResult)
+}
+
+async function testRetryExpectingError(
+  responses: TestResponse[]
+): Promise<void> {
+  responses = responses.reverse() // Reverse responses since we pop from end
+
+  expect(
+    retry(
+      'test',
+      async () => handleResponse(responses.pop()),
+      (response: TestResponse) => response.statusCode
+    )
+  ).rejects.toBeInstanceOf(Error)
+}
+
+test('retry works on successful response', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    'Ok'
+  )
+})
+
+test('retry works after retryable status code', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 503,
+        result: null
+      },
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    'Ok'
+  )
+})
+
+test('retry fails after exhausting retries', async () => {
+  await testRetryExpectingError([
+    {
+      statusCode: 503,
+      result: null
+    },
+    {
+      statusCode: 503,
+      result: null
+    },
+    {
+      statusCode: 200,
+      result: 'Ok'
+    }
+  ])
+})
+
+test('retry fails after non-retryable status code', async () => {
+  await testRetryExpectingError([
+    {
+      statusCode: 500,
+      result: null
+    },
+    {
+      statusCode: 200,
+      result: 'Ok'
+    }
+  ])
+})
+
+test('retry works after error', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 999,
+        result: null
+      },
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    'Ok'
+  )
+})
+
+test('retry returns after client error', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 400,
+        result: null
+      },
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    null
+  )
+})
diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts
index 2fc379b3..720d2ad6 100644
--- a/packages/cache/__tests__/saveCache.test.ts
+++ b/packages/cache/__tests__/saveCache.test.ts
@@ -135,7 +135,7 @@ test('save with server error should fail', async () => {
   )
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
 expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
@@ -176,6 +176,6 @@ test('save with valid inputs uploads a cache', async () => {
   )
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index cf9be5eb..39eaf6de 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,6 +3,7 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar} from './internal/tar'
+import {UploadOptions} from './options'
 
 function checkPaths(paths: string[]): void {
   if (!paths || paths.length === 0) {
@@ -102,9 +103,14 @@ export async function restoreCache(
  *
  * @param paths a list of file paths to be cached
  * @param key an explicit key for restoring the cache
+ * @param options cache upload options
  * @returns number returns cacheId if the cache was saved successfully
  */
-export async function saveCache(paths: string[], key: string): Promise<number> {
+export async function saveCache(
+  paths: string[],
+  key: string,
+  options?: UploadOptions
+): Promise<number> {
   checkPaths(paths)
   checkKey(key)
 
@@ -147,7 +153,7 @@ export async function saveCache(paths: string[], key: string): Promise<number> {
   }
 
   core.debug(`Saving Cache (ID: ${cacheId})`)
-  await cacheHttpClient.saveCache(cacheId, archivePath)
+  await cacheHttpClient.saveCache(cacheId, archivePath, options)
 
   return cacheId
 }
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index f4f8c4d1..a3b9633a 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -20,6 +20,7 @@ import {
   ReserveCacheRequest,
   ReserveCacheResponse
 } from './contracts'
+import {UploadOptions} from '../options'
 
 const versionSalt = '1.0'
 
@@ -30,6 +31,13 @@ function isSuccessStatusCode(statusCode?: number): boolean {
   return statusCode >= 200 && statusCode < 300
 }
 
+function isServerErrorStatusCode(statusCode?: number): boolean {
+  if (!statusCode) {
+    return true
+  }
+  return statusCode >= 500
+}
+
 function isRetryableStatusCode(statusCode?: number): boolean {
   if (!statusCode) {
     return false
@@ -100,6 +108,75 @@ export function getCacheVersion(
     .digest('hex')
 }
 
+export async function retry<T>(
+  name: string,
+  method: () => Promise<T>,
+  getStatusCode: (arg0: T) => number | undefined,
+  maxAttempts = 2
+): Promise<T> {
+  let response: T | undefined = undefined
+  let statusCode: number | undefined = undefined
+  let isRetryable = false
+  let errorMessage = ''
+  let attempt = 1
+
+  while (attempt <= maxAttempts) {
+    try {
+      response = await method()
+      statusCode = getStatusCode(response)
+
+      if (!isServerErrorStatusCode(statusCode)) {
+        return response
+      }
+
+      isRetryable = isRetryableStatusCode(statusCode)
+      errorMessage = `Cache service responded with ${statusCode}`
+    } catch (error) {
+      isRetryable = true
+      errorMessage = error.message
+    }
+
+    core.debug(
+      `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
+    )
+
+    if (!isRetryable) {
+      core.debug(`${name} - Error is not retryable`)
+      break
+    }
+
+    attempt++
+  }
+
+  throw Error(`${name} failed: ${errorMessage}`)
+}
+
+export async function retryTypedResponse<T>(
+  name: string,
+  method: () => Promise<ITypedResponse<T>>,
+  maxAttempts = 2
+): Promise<ITypedResponse<T>> {
+  return await retry(
+    name,
+    method,
+    (response: ITypedResponse<T>) => response.statusCode,
+    maxAttempts
+  )
+}
+
+export async function retryHttpClientResponse<T>(
+  name: string,
+  method: () => Promise<IHttpClientResponse>,
+  maxAttempts = 2
+): Promise<IHttpClientResponse> {
+  return await retry(
+    name,
+    method,
+    (response: IHttpClientResponse) => response.message.statusCode,
+    maxAttempts
+  )
+}
+
 export async function getCacheEntry(
   keys: string[],
   paths: string[],
@@ -111,8 +188,8 @@ export async function getCacheEntry(
     keys.join(',')
   )}&version=${version}`
 
-  const response = await httpClient.getJson<ArtifactCacheEntry>(
-    getCacheApiUrl(resource)
+  const response = await retryTypedResponse('getCacheEntry', async () =>
+    httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
   )
   if (response.statusCode === 204) {
     return null
@@ -145,9 +222,12 @@ export async function downloadCache(
   archiveLocation: string,
   archivePath: string
 ): Promise<void> {
-  const writableStream = fs.createWriteStream(archivePath)
+  const writeStream = fs.createWriteStream(archivePath)
   const httpClient = new HttpClient('actions/cache')
-  const downloadResponse = await httpClient.get(archiveLocation)
+  const downloadResponse = await retryHttpClientResponse(
+    'downloadCache',
+    async () => httpClient.get(archiveLocation)
+  )
 
   // Abort download if no traffic received over the socket.
   downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
@@ -155,7 +235,7 @@
     core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`)
   })
 
-  await pipeResponseToStream(downloadResponse, writableStream)
+  await pipeResponseToStream(downloadResponse, writeStream)
 
   // Validate download size.
   const contentLengthHeader = downloadResponse.message.headers['content-length']
@@ -187,9 +267,11 @@ export async function reserveCache(
     key,
     version
   }
-  const response = await httpClient.postJson<ReserveCacheResponse>(
-    getCacheApiUrl('caches'),
-    reserveCacheRequest
+  const response = await retryTypedResponse('reserveCache', async () =>
+    httpClient.postJson<ReserveCacheResponse>(
+      getCacheApiUrl('caches'),
+      reserveCacheRequest
+    )
   )
   return response?.result?.cacheId ?? -1
 }
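Editorial illustration, not part of the patch, of the retry contract implemented above: a response whose status code is not a server error is returned as-is (including 4xx), a retryable 5xx triggers another attempt, and exhausting `maxAttempts` throws. The request below is hypothetical:

```ts
// Sketch, assuming `retry` is imported from cacheHttpClient as in the tests.
async function demo(): Promise<void> {
  let attempts = 0
  // Hypothetical request: a transient 503 on the first call, then success.
  const flakyRequest = async (): Promise<{statusCode: number; body: string}> => {
    attempts++
    return attempts === 1
      ? {statusCode: 503, body: ''}
      : {statusCode: 200, body: 'ok'}
  }

  const response = await retry(
    'flakyRequest', // name used in core.debug messages
    flakyRequest,
    r => r.statusCode // tells retry() where to read the status code
  )
  // response.body === 'ok' after one retry (maxAttempts defaults to 2).
  // A 400 would have been returned directly; a second 5xx would throw.
}
```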
 }
 
@@ -206,7 +288,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
   httpClient: HttpClient,
   resourceUrl: string,
-  data: NodeJS.ReadableStream,
+  openStream: () => NodeJS.ReadableStream,
   start: number,
   end: number
 ): Promise<void> {
@@ -223,56 +305,31 @@ async function uploadChunk(
     'Content-Range': getContentRange(start, end)
   }
 
-  const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
-    return await httpClient.sendStream(
-      'PATCH',
-      resourceUrl,
-      data,
-      additionalHeaders
-    )
-  }
-
-  const response = await uploadChunkRequest()
-  if (isSuccessStatusCode(response.message.statusCode)) {
-    return
-  }
-
-  if (isRetryableStatusCode(response.message.statusCode)) {
-    core.debug(
-      `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
-    )
-    const retryResponse = await uploadChunkRequest()
-    if (isSuccessStatusCode(retryResponse.message.statusCode)) {
-      return
-    }
-  }
-
-  throw new Error(
-    `Cache service responded with ${response.message.statusCode} during chunk upload.`
+  await retryHttpClientResponse(
+    `uploadChunk (start: ${start}, end: ${end})`,
+    async () =>
+      httpClient.sendStream(
+        'PATCH',
+        resourceUrl,
+        openStream(),
+        additionalHeaders
+      )
   )
 }
 
-function parseEnvNumber(key: string): number | undefined {
-  const value = Number(process.env[key])
-  if (Number.isNaN(value) || value < 0) {
-    return undefined
-  }
-  return value
-}
-
 async function uploadFile(
   httpClient: HttpClient,
   cacheId: number,
-  archivePath: string
+  archivePath: string,
+  options?: UploadOptions
 ): Promise<void> {
   // Upload Chunks
   const fileSize = fs.statSync(archivePath).size
   const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
   const fd = fs.openSync(archivePath, 'r')
 
-  const concurrency = parseEnvNumber('CACHE_UPLOAD_CONCURRENCY') ?? 4 // # of HTTP requests in parallel
-  const MAX_CHUNK_SIZE =
-    parseEnvNumber('CACHE_UPLOAD_CHUNK_SIZE') ?? 32 * 1024 * 1024 // 32 MB Chunks
+  const concurrency = options?.uploadConcurrency ?? 4 // # of HTTP requests in parallel
+  const MAX_CHUNK_SIZE = options?.uploadChunkSize ?? 32 * 1024 * 1024 // 32 MB Chunks
 
   core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`)
 
   const parallelUploads = [...new Array(concurrency).keys()]
@@ -287,14 +344,26 @@ async function uploadFile(
         const start = offset
         const end = offset + chunkSize - 1
         offset += MAX_CHUNK_SIZE
-        const chunk = fs.createReadStream(archivePath, {
-          fd,
-          start,
-          end,
-          autoClose: false
-        })
 
-        await uploadChunk(httpClient, resourceUrl, chunk, start, end)
+        await uploadChunk(
+          httpClient,
+          resourceUrl,
+          () =>
+            fs
+              .createReadStream(archivePath, {
+                fd,
+                start,
+                end,
+                autoClose: false
+              })
+              .on('error', error => {
+                throw new Error(
+                  `Cache upload failed because file read failed with ${error.message}`
+                )
+              }),
+          start,
+          end
+        )
       }
     })
   )
@@ -310,20 +379,23 @@ async function commitCache(
   filesize: number
 ): Promise<ITypedResponse<null>> {
   const commitCacheRequest: CommitCacheRequest = {size: filesize}
-  return await httpClient.postJson<null>(
-    getCacheApiUrl(`caches/${cacheId.toString()}`),
-    commitCacheRequest
+  return await retryTypedResponse('commitCache', async () =>
+    httpClient.postJson<null>(
+      getCacheApiUrl(`caches/${cacheId.toString()}`),
+      commitCacheRequest
+    )
   )
 }
 
 export async function saveCache(
   cacheId: number,
-  archivePath: string
+  archivePath: string,
+  options?: UploadOptions
 ): Promise<void> {
   const httpClient = createHttpClient()
 
   core.debug('Upload cache')
-  await uploadFile(httpClient, cacheId, archivePath)
+  await uploadFile(httpClient, cacheId, archivePath, options)
 
   // Commit Cache
   core.debug('Commiting cache')
diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts
new file mode 100644
index 00000000..c5ecdad5
--- /dev/null
+++ b/packages/cache/src/options.ts
@@ -0,0 +1,17 @@
+/**
+ * Options to control cache upload
+ */
+export interface UploadOptions {
+  /**
+   * Number of parallel cache uploads
+   *
+   * @default 4
+   */
+  uploadConcurrency?: number
+  /**
+   * Maximum chunk size for cache upload
+   *
+   * @default 32MB
+   */
+  uploadChunkSize?: number
+}
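Editor's note: this commit replaces the `CACHE_UPLOAD_CONCURRENCY`/`CACHE_UPLOAD_CHUNK_SIZE` environment variables with an explicit `UploadOptions` argument. Below is a minimal usage sketch of the new parameter; the paths and key are illustrative, and it assumes the package is consumed as `@actions/cache`, as in the repository README.

```ts
import * as cache from '@actions/cache'

async function run(): Promise<void> {
  // Illustrative inputs; a real workflow would derive the key from a lockfile hash.
  const paths = ['node_modules']
  const key = 'npm-deps-example'

  // Both UploadOptions fields are optional and fall back to the defaults
  // hard-coded in uploadFile: 4 parallel requests and 32 MB chunks.
  const cacheId = await cache.saveCache(paths, key, {
    uploadConcurrency: 8,
    uploadChunkSize: 16 * 1024 * 1024 // 16 MB
  })
  console.log(`Saved cache with id ${cacheId}`)
}

run().catch(error => console.error(error.message))
```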
From b3c8e19a7aec4fd27a7afc5760cc309f6b1308b2 Mon Sep 17 00:00:00 2001
From: Aiqiao Yan
Date: Tue, 12 May 2020 14:47:31 -0400
Subject: [PATCH 6/7] Attempt to fix the test

---
 packages/cache/package-lock.json            |  6 ++++
 packages/cache/package.json                 |  1 +
 packages/cache/src/cache.ts                 | 28 +++++++++++++++----
 .../cache/src/internal/cacheHttpClient.ts   |  6 ++--
 packages/cache/src/internal/contracts.d.ts  |  2 +-
 5 files changed, 33 insertions(+), 10 deletions(-)

diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json
index 4e2de82a..9d8b6cbf 100644
--- a/packages/cache/package-lock.json
+++ b/packages/cache/package-lock.json
@@ -77,6 +77,12 @@
       "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
       "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
     },
+    "typescript": {
+      "version": "3.8.3",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz",
+      "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==",
+      "dev": true
+    },
     "uuid": {
       "version": "3.4.0",
       "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
diff --git a/packages/cache/package.json b/packages/cache/package.json
index 2ee2d9bb..fea57f0b 100644
--- a/packages/cache/package.json
+++ b/packages/cache/package.json
@@ -44,6 +44,7 @@
     "uuid": "^3.3.3"
   },
   "devDependencies": {
+    "typescript": "^3.8.3",
     "@types/uuid": "^3.4.5"
   }
 }
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 39eaf6de..5f55c82b 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -5,9 +5,23 @@ import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar} from './internal/tar'
 import {UploadOptions} from './options'
 
+export class ValidationError extends Error {
+  constructor(message: string) {
+    super(message)
+    this.name = 'ValidationError'
+  }
+}
+
+export class ReserveCacheError extends Error {
+  constructor(message: string) {
+    super(message)
+    this.name = 'ReserveCacheError'
+  }
+}
+
 function checkPaths(paths: string[]): void {
   if (!paths || paths.length === 0) {
-    throw new Error(
+    throw new ValidationError(
       `Path Validation Error: At least one directory or file path is required`
     )
   }
@@ -15,13 +29,15 @@ function checkPaths(paths: string[]): void {
 
 function checkKey(key: string): void {
   if (key.length > 512) {
-    throw new Error(
+    throw new ValidationError(
       `Key Validation Error: ${key} cannot be larger than 512 characters.`
     )
   }
   const regex = /^[^,]*$/
   if (!regex.test(key)) {
-    throw new Error(`Key Validation Error: ${key} cannot contain commas.`)
+    throw new ValidationError(
+      `Key Validation Error: ${key} cannot contain commas.`
+    )
   }
 }
 
@@ -47,7 +63,7 @@ export async function restoreCache(
   core.debug(JSON.stringify(keys))
 
   if (keys.length > 10) {
-    throw new Error(
+    throw new ValidationError(
       `Key Validation Error: Keys are limited to a maximum of 10.`
     )
   }
@@ -121,13 +137,13 @@
     compressionMethod
   })
   if (cacheId === -1) {
-    throw new Error(
+    throw new ReserveCacheError(
       `Unable to reserve cache with key ${key}, another job may be creating this cache.`
     )
   }
   core.debug(`Cache ID: ${cacheId}`)
 
-  const cachePaths = await utils.resolvePaths(paths)
+  const cachePaths = await utils.resolvePaths(paths)
   core.debug('Cache Paths:')
   core.debug(`${JSON.stringify(cachePaths)}`)
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index a3b9633a..43692fa7 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -15,7 +15,7 @@ import * as utils from './cacheUtils'
 import {CompressionMethod, SocketTimeout} from './constants'
 import {
   ArtifactCacheEntry,
-  CacheOptions,
+  InternalCacheOptions,
   CommitCacheRequest,
   ReserveCacheRequest,
   ReserveCacheResponse
@@ -180,7 +180,7 @@
 export async function getCacheEntry(
   keys: string[],
   paths: string[],
-  options?: CacheOptions
+  options?: InternalCacheOptions
 ): Promise<ArtifactCacheEntry | null> {
   const httpClient = createHttpClient()
   const version = getCacheVersion(paths, options?.compressionMethod)
@@ -258,7 +258,7 @@
 export async function reserveCache(
   key: string,
   paths: string[],
-  options?: CacheOptions
+  options?: InternalCacheOptions
 ): Promise<number> {
   const httpClient = createHttpClient()
   const version = getCacheVersion(paths, options?.compressionMethod)
diff --git a/packages/cache/src/internal/contracts.d.ts b/packages/cache/src/internal/contracts.d.ts
index ca3f3620..80484769 100644
--- a/packages/cache/src/internal/contracts.d.ts
+++ b/packages/cache/src/internal/contracts.d.ts
@@ -20,6 +20,6 @@ export interface ReserveCacheResponse {
   cacheId: number
 }
 
-export interface CacheOptions {
+export interface InternalCacheOptions {
   compressionMethod?: CompressionMethod
 }
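Editor's note: the typed errors added in this commit let callers tell bad input apart from a benign reservation race. Below is a minimal sketch of that branching in hypothetical caller code, assuming `cache.ts` is the package entry point. It compares `error.name` rather than using `instanceof`, which is unreliable for subclassed `Error` when compiling to ES5 until the `Object.setPrototypeOf` calls added in the next commit.

```ts
import * as cache from '@actions/cache'

async function trySaveCache(paths: string[], key: string): Promise<void> {
  try {
    await cache.saveCache(paths, key)
  } catch (error) {
    if (error.name === 'ValidationError') {
      // Bad input: no paths, key over 512 characters, or a comma in the key.
      throw error
    } else if (error.name === 'ReserveCacheError') {
      // Another job reserved this key first; skipping the save is expected.
      console.log(`Cache reservation failed: ${error.message}`)
    } else {
      throw error
    }
  }
}
```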
From d2b2399bd2fce877e9dd828711f74b83306a602b Mon Sep 17 00:00:00 2001
From: Aiqiao Yan
Date: Fri, 15 May 2020 12:18:50 -0400
Subject: [PATCH 7/7] React to feedback

---
 .github/workflows/artifact-tests.yml          |  8 ++--
 .github/workflows/cache-tests.yml             |  8 ++--
 .../artifact/__tests__}/test-artifact-file.sh |  0
 packages/cache/README.md                      | 12 ++---
 packages/cache/RELEASES.md                    |  2 +-
 packages/cache/__tests__/cacheUtils.test.ts   | 19 ++------
 .../cache/__tests__}/create-cache-files.sh    |  0
 packages/cache/__tests__/restoreCache.test.ts | 44 +++++++------------
 packages/cache/__tests__/saveCache.test.ts    | 12 ++++-
 .../cache/__tests__}/verify-cache-files.sh    |  0
 packages/cache/package-lock.json              |  2 +-
 packages/cache/package.json                   |  2 +-
 packages/cache/src/cache.ts                   | 14 +++---
 .../cache/src/internal/cacheHttpClient.ts     |  4 +-
 packages/cache/src/internal/cacheUtils.ts     |  3 +-
 packages/cache/src/options.ts                 |  2 +-
 16 files changed, 59 insertions(+), 73 deletions(-)
 rename {scripts => packages/artifact/__tests__}/test-artifact-file.sh (100%)
 rename {scripts => packages/cache/__tests__}/create-cache-files.sh (100%)
 rename {scripts => packages/cache/__tests__}/verify-cache-files.sh (100%)

diff --git a/.github/workflows/artifact-tests.yml b/.github/workflows/artifact-tests.yml
index 8b45404a..5fc98f7e 100644
--- a/.github/workflows/artifact-tests.yml
+++ b/.github/workflows/artifact-tests.yml
@@ -64,8 +64,8 @@ jobs:
     - name: Verify downloadArtifact()
       shell: bash
       run: |
-        scripts/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-        scripts/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
 
     - name: Download artifacts using downloadAllArtifacts()
       run: |
@@ -75,5 +75,5 @@ jobs:
     - name: Verify downloadAllArtifacts()
       shell: bash
       run: |
-        scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-        scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
\ No newline at end of file
+        packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
\ No newline at end of file
diff --git a/.github/workflows/cache-tests.yml b/.github/workflows/cache-tests.yml
index e63ac90c..10120ddc 100644
--- a/.github/workflows/cache-tests.yml
+++ b/.github/workflows/cache-tests.yml
@@ -47,11 +47,11 @@ jobs:
 
     - name: Generate files in working directory
      shell: bash
-      run: scripts/create-cache-files.sh ${{ runner.os }} test-cache
+      run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
 
    - name: Generate files outside working directory
      shell: bash
-      run: scripts/create-cache-files.sh ${{ runner.os }} ~/test-cache
+      run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
 
    # We're using node -e to call the functions directly available in the @actions/cache package
    - name: Save cache using saveCache()
@@ -65,5 +65,5 @@ jobs:
    - name: Verify cache
      shell: bash
      run: |
-        scripts/verify-cache-files.sh ${{ runner.os }} test-cache
-        scripts/verify-cache-files.sh ${{ runner.os }} ~/test-cache
\ No newline at end of file
+        packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
+        packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
\ No newline at end of file
diff --git a/scripts/test-artifact-file.sh b/packages/artifact/__tests__/test-artifact-file.sh
similarity index 100%
rename from scripts/test-artifact-file.sh
rename to packages/artifact/__tests__/test-artifact-file.sh
diff --git a/packages/cache/README.md b/packages/cache/README.md
index f15d0b16..de0893d0 100644
--- a/packages/cache/README.md
+++ b/packages/cache/README.md
@@ -2,6 +2,10 @@
 
 > Functions necessary for caching dependencies and build outputs to improve workflow execution time.
 
+See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows) for how caching works.
+
+Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 5 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 5 GB.
+
 ## Usage
 
 #### Restore Cache
@@ -24,7 +28,7 @@
 
 #### Save Cache
 
-Saves a cache containing the files in `paths` using the `key` provided. Function returns the cache id if the cache was save succesfully.
+Saves a cache containing the files in `paths` using the `key` provided. The files will be compressed using the zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved successfully, and throws an error if the cache upload fails.
 
 ```js
 const cache = require('@actions/cache');
 const paths = [
     'node_modules',
     'packages/*/node_modules/'
 ]
 const key = 'npm-foobar-d5ea0750'
 const cacheId = await cache.saveCache(paths, key)
-```
-
-## Additional Documentation
-
-See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
\ No newline at end of file +``` \ No newline at end of file diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 1250c050..920a20e8 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -1,5 +1,5 @@ # @actions/cache Releases -### 1.0.0 +### 0.1.0 - Initial release \ No newline at end of file diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts index 0a4b0f4c..25d7ca82 100644 --- a/packages/cache/__tests__/cacheUtils.test.ts +++ b/packages/cache/__tests__/cacheUtils.test.ts @@ -1,24 +1,11 @@ -import * as io from '@actions/io' import {promises as fs} from 'fs' import * as path from 'path' import * as cacheUtils from '../src/internal/cacheUtils' -jest.mock('@actions/core') -jest.mock('os') - -function getTempDir(): string { - return path.join(__dirname, '_temp', 'cacheUtils') -} - -afterAll(async () => { - delete process.env['GITHUB_WORKSPACE'] - await io.rmRF(getTempDir()) -}) - -test('getArchiveFileSize returns file size', () => { +test('getArchiveFileSizeIsBytes returns file size', () => { const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt') - const size = cacheUtils.getArchiveFileSize(filePath) + const size = cacheUtils.getArchiveFileSizeIsBytes(filePath) expect(size).toBe(11) }) @@ -28,6 +15,8 @@ test('unlinkFile unlinks file', async () => { const testFile = path.join(testDirectory, 'test.txt') await fs.writeFile(testFile, 'hello world') + await expect(fs.stat(testFile)).resolves.not.toThrow() + await cacheUtils.unlinkFile(testFile) // This should throw as testFile should not exist diff --git a/scripts/create-cache-files.sh b/packages/cache/__tests__/create-cache-files.sh similarity index 100% rename from scripts/create-cache-files.sh rename to packages/cache/__tests__/create-cache-files.sh diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts index b466b9af..2e0fd068 100644 --- a/packages/cache/__tests__/restoreCache.test.ts +++ b/packages/cache/__tests__/restoreCache.test.ts @@ -12,6 +12,12 @@ jest.mock('../src/internal/cacheUtils') jest.mock('../src/internal/tar') beforeAll(() => { + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) + // eslint-disable-next-line @typescript-eslint/promise-function-async jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { const actualUtils = jest.requireActual('../src/internal/cacheUtils') @@ -56,7 +62,6 @@ test('restore with no cache found', async () => { const paths = ['node_modules'] const key = 'node-test' - const infoMock = jest.spyOn(core, 'info') jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { return Promise.resolve(null) }) @@ -64,9 +69,6 @@ test('restore with no cache found', async () => { const cacheKey = await restoreCache(paths, key) expect(cacheKey).toBe(undefined) - expect(infoMock).toHaveBeenCalledWith( - `Cache not found for input keys: ${key}` - ) }) test('restore with server error should fail', async () => { @@ -87,8 +89,6 @@ test('restore with restore keys and no cache found', async () => { const key = 'node-test' const restoreKey = 'node-' - const infoMock = jest.spyOn(core, 'info') - jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => { return Promise.resolve(null) 
}) @@ -96,17 +96,12 @@ test('restore with restore keys and no cache found', async () => { const cacheKey = await restoreCache(paths, key, [restoreKey]) expect(cacheKey).toBe(undefined) - expect(infoMock).toHaveBeenCalledWith( - `Cache not found for input keys: ${key}, ${restoreKey}` - ) }) test('restore with gzip compressed cache found', async () => { const paths = ['node_modules'] const key = 'node-test' - const infoMock = jest.spyOn(core, 'info') - const cacheEntry: ArtifactCacheEntry = { cacheKey: key, scope: 'refs/heads/master', @@ -128,8 +123,8 @@ test('restore with gzip compressed cache found', async () => { const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const fileSize = 142 - const getArchiveFileSizeMock = jest - .spyOn(cacheUtils, 'getArchiveFileSize') + const getArchiveFileSizeIsBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes') .mockReturnValue(fileSize) const extractTarMock = jest.spyOn(tar, 'extractTar') @@ -151,7 +146,7 @@ test('restore with gzip compressed cache found', async () => { cacheEntry.archiveLocation, archivePath ) - expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath) + expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath) expect(extractTarMock).toHaveBeenCalledTimes(1) expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) @@ -159,11 +154,10 @@ test('restore with gzip compressed cache found', async () => { expect(unlinkFileMock).toHaveBeenCalledTimes(1) expect(unlinkFileMock).toHaveBeenCalledWith(archivePath) - expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) -test('restore with a pull request event and zstd compressed cache found', async () => { +test('restore with zstd compressed cache found', async () => { const paths = ['node_modules'] const key = 'node-test' @@ -189,8 +183,8 @@ test('restore with a pull request event and zstd compressed cache found', async const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const fileSize = 62915000 - const getArchiveFileSizeMock = jest - .spyOn(cacheUtils, 'getArchiveFileSize') + const getArchiveFileSizeIsBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes') .mockReturnValue(fileSize) const extractTarMock = jest.spyOn(tar, 'extractTar') @@ -210,13 +204,11 @@ test('restore with a pull request event and zstd compressed cache found', async cacheEntry.archiveLocation, archivePath ) - expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath) + expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath) expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`) expect(extractTarMock).toHaveBeenCalledTimes(1) expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) - - expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) @@ -247,8 +239,8 @@ test('restore with cache found for restore key', async () => { const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache') const fileSize = 142 - const getArchiveFileSizeMock = jest - .spyOn(cacheUtils, 'getArchiveFileSize') + const getArchiveFileSizeIsBytesMock = jest + .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes') .mockReturnValue(fileSize) const extractTarMock = jest.spyOn(tar, 'extractTar') @@ -268,14 +260,10 @@ test('restore with cache found for restore key', async () => { cacheEntry.archiveLocation, archivePath ) - 
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath) + expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath) expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`) expect(extractTarMock).toHaveBeenCalledTimes(1) expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression) - - expect(infoMock).toHaveBeenCalledWith( - `Cache restored from key: ${restoreKey}` - ) expect(getCompressionMock).toHaveBeenCalledTimes(1) }) diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts index 720d2ad6..71132878 100644 --- a/packages/cache/__tests__/saveCache.test.ts +++ b/packages/cache/__tests__/saveCache.test.ts @@ -1,3 +1,4 @@ +import * as core from '@actions/core' import * as path from 'path' import {saveCache} from '../src/cache' import * as cacheHttpClient from '../src/internal/cacheHttpClient' @@ -5,12 +6,17 @@ import * as cacheUtils from '../src/internal/cacheUtils' import {CacheFilename, CompressionMethod} from '../src/internal/constants' import * as tar from '../src/internal/tar' -jest.mock('@actions/core') jest.mock('../src/internal/cacheHttpClient') jest.mock('../src/internal/cacheUtils') jest.mock('../src/internal/tar') beforeAll(() => { + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) + // eslint-disable-next-line @typescript-eslint/promise-function-async jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => { const actualUtils = jest.requireActual('../src/internal/cacheUtils') @@ -42,7 +48,9 @@ test('save with large cache outputs should fail', async () => { const createTarMock = jest.spyOn(tar, 'createTar') const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit - jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValueOnce(cacheSize) + jest + .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes') + .mockReturnValueOnce(cacheSize) const compression = CompressionMethod.Gzip const getCompressionMock = jest .spyOn(cacheUtils, 'getCompressionMethod') diff --git a/scripts/verify-cache-files.sh b/packages/cache/__tests__/verify-cache-files.sh similarity index 100% rename from scripts/verify-cache-files.sh rename to packages/cache/__tests__/verify-cache-files.sh diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index 9d8b6cbf..08f18a7c 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "1.0.0", + "version": "0.1.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/packages/cache/package.json b/packages/cache/package.json index fea57f0b..69e93181 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "1.0.0", + "version": "0.1.0", "preview": true, "description": "Actions cache lib", "keywords": [ diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 5f55c82b..fc04a297 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -9,6 +9,7 @@ export class ValidationError extends Error { constructor(message: string) { super(message) this.name = 'ValidationError' + Object.setPrototypeOf(this, ValidationError.prototype) } } @@ -16,6 +17,7 @@ export class ReserveCacheError extends Error { 
  constructor(message: string) {
     super(message)
     this.name = 'ReserveCacheError'
+    Object.setPrototypeOf(this, ReserveCacheError.prototype)
   }
 }
 
@@ -47,7 +49,7 @@ function checkKey(key: string): void {
  * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
- * @returns string returns the key for the cache hit, otherwise return undefined
+ * @returns string returns the key for the cache hit, otherwise returns undefined
  */
 export async function restoreCache(
   paths: string[],
@@ -78,7 +80,7 @@ export async function restoreCache(
     compressionMethod
   })
   if (!cacheEntry?.archiveLocation) {
-    core.info(`Cache not found for input keys: ${keys.join(', ')}`)
+    // Cache not found
     return undefined
   }
 
@@ -92,7 +94,7 @@ export async function restoreCache(
 
   // Download the cache from the cache entry
   await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)
-  const archiveFileSize = utils.getArchiveFileSize(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
   core.info(
     `Cache Size: ~${Math.round(
       archiveFileSize / (1024 * 1024)
@@ -109,8 +111,6 @@ export async function restoreCache(
     }
   }
 
-  core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
-
   return cacheEntry.cacheKey
 }
 
@@ -120,7 +120,7 @@
  * @param paths a list of file paths to be cached
  * @param key an explicit key for restoring the cache
  * @param options cache upload options
- * @returns number returns cacheId if the cache was saved successfully
+ * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 export async function saveCache(
   paths: string[],
@@ -158,7 +158,7 @@ export async function saveCache(
   await createTar(archiveFolder, cachePaths, compressionMethod)
 
   const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
-  const archiveFileSize = utils.getArchiveFileSize(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
   core.debug(`File Size: ${archiveFileSize}`)
   if (archiveFileSize > fileSizeLimit) {
     throw new Error(
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index 43692fa7..b7d34448 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -242,7 +242,7 @@ export async function downloadCache(
 
   if (contentLengthHeader) {
     const expectedLength = parseInt(contentLengthHeader)
-    const actualLength = utils.getArchiveFileSize(archivePath)
+    const actualLength = utils.getArchiveFileSizeIsBytes(archivePath)
 
     if (actualLength !== expectedLength) {
       throw new Error(
@@ -399,7 +399,7 @@ export async function saveCache(
 
   // Commit Cache
   core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSize(archivePath)
+  const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
   const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
   if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
     throw new Error(
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
index 8743963a..f3f85006 100644
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -34,7 +34,7 @@ export async function createTempDirectory(): Promise<string> {
   return dest
 }
 
-export function getArchiveFileSize(filePath: string): number {
+export function getArchiveFileSizeIsBytes(filePath: string): number {
   return fs.statSync(filePath).size
 }
 
@@ -80,6 +80,7 @@ async function getVersion(app: string): Promise<string> {
   return versionOutput
 }
 
+// Use zstandard if possible to maximize cache performance
 export async function getCompressionMethod(): Promise<CompressionMethod> {
   const versionOutput = await getVersion('zstd')
   return versionOutput.toLowerCase().includes('zstd command line interface')
diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts
index c5ecdad5..97441c1e 100644
--- a/packages/cache/src/options.ts
+++ b/packages/cache/src/options.ts
@@ -9,7 +9,7 @@ export interface UploadOptions {
    */
   uploadConcurrency?: number
   /**
-   * Maximum chunk size for cache upload
+   * Maximum chunk size in bytes for cache upload
    *
    * @default 32MB
    */
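Editor's note: the `getCompressionMethod` change above picks zstd when its CLI is available and quietly falls back to gzip. Below is a standalone approximation of that probe, based on the `getVersion` helper visible in the cacheUtils.ts hunks; it is not the package's exact code, and the enum is a local stand-in for the constants module.

```ts
import * as exec from '@actions/exec'

// Local stand-ins for the package's CompressionMethod constants.
enum CompressionMethod {
  Gzip = 'gzip',
  Zstd = 'zstd'
}

// Run `zstd --version` and look for its banner; any failure or other
// output falls back to gzip, which runner images ship by default.
async function detectCompressionMethod(): Promise<CompressionMethod> {
  let versionOutput = ''
  try {
    await exec.exec('zstd --version', [], {
      ignoreReturnCode: true,
      silent: true,
      listeners: {
        stdout: (data: Buffer): void => {
          versionOutput += data.toString()
        }
      }
    })
  } catch (error) {
    // zstd is not on the PATH; fall through to gzip.
  }
  return versionOutput.toLowerCase().includes('zstd command line interface')
    ? CompressionMethod.Zstd
    : CompressionMethod.Gzip
}
```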