diff --git a/README.md b/README.md
index 8701ab5c..22fbc27c 100644
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ $ npm install @actions/artifact --save
Provides functions to interact with actions cache. Read more [here](packages/cache)
```bash
-$ npm install @actions/artifact --save
+$ npm install @actions/cache --save
```
diff --git a/packages/cache/README.md b/packages/cache/README.md
index b65c7f34..402a7f50 100644
--- a/packages/cache/README.md
+++ b/packages/cache/README.md
@@ -1 +1,13 @@
# `@actions/cache`
+
+> Functions necessary for caching dependencies and build outputs to improve workflow execution time.
+
+## Usage
+
+#### Restore Cache
+
+#### Save Cache
+
+## Additional Documentation
+
+See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
\ No newline at end of file
diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts
index d2165280..e5d7eacf 100644
--- a/packages/cache/__tests__/cacheHttpClient.test.ts
+++ b/packages/cache/__tests__/cacheHttpClient.test.ts
@@ -1,17 +1,25 @@
import {getCacheVersion} from '../src/internal/cacheHttpClient'
import {CompressionMethod} from '../src/internal/constants'
-test('getCacheVersion with path input and compression method undefined returns version', async () => {
- const inputPath = 'node_modules'
- const result = getCacheVersion(inputPath)
+test('getCacheVersion with one path returns version', async () => {
+ const paths = ['node_modules']
+ const result = getCacheVersion(paths)
expect(result).toEqual(
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
)
})
+test('getCacheVersion with multiple paths returns version', async () => {
+ const paths = ['node_modules', 'dist']
+ const result = getCacheVersion(paths)
+ expect(result).toEqual(
+ '165c3053bc646bf0d4fac17b1f5731caca6fe38e0e464715c0c3c6b6318bf436'
+ )
+})
+
test('getCacheVersion with zstd compression returns version', async () => {
- const inputPath = 'node_modules'
- const result = getCacheVersion(inputPath, CompressionMethod.Zstd)
+ const paths = ['node_modules']
+ const result = getCacheVersion(paths, CompressionMethod.Zstd)
expect(result).toEqual(
'273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24'
@@ -19,8 +27,8 @@ test('getCacheVersion with zstd compression returns version', async () => {
})
test('getCacheVersion with gzip compression does not change vesion', async () => {
- const inputPath = 'node_modules'
- const result = getCacheVersion(inputPath, CompressionMethod.Gzip)
+ const paths = ['node_modules']
+ const result = getCacheVersion(paths, CompressionMethod.Gzip)
expect(result).toEqual(
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts
index b09eed13..0a4b0f4c 100644
--- a/packages/cache/__tests__/cacheUtils.test.ts
+++ b/packages/cache/__tests__/cacheUtils.test.ts
@@ -1,9 +1,6 @@
-import * as core from '@actions/core'
import * as io from '@actions/io'
import {promises as fs} from 'fs'
-import * as os from 'os'
import * as path from 'path'
-import {v4 as uuidV4} from 'uuid'
import * as cacheUtils from '../src/internal/cacheUtils'
jest.mock('@actions/core')
@@ -26,143 +23,6 @@ test('getArchiveFileSize returns file size', () => {
expect(size).toBe(11)
})
-test('logWarning logs a message with a warning prefix', () => {
- const message = 'A warning occurred.'
-
- const infoMock = jest.spyOn(core, 'info')
-
- cacheUtils.logWarning(message)
-
- expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`)
-})
-
-test('resolvePaths with no ~ in path', async () => {
- const filePath = '.cache'
-
- // Create the following layout:
- // cwd
- // cwd/.cache
- // cwd/.cache/file.txt
-
- const root = path.join(getTempDir(), 'no-tilde')
- // tarball entries will be relative to workspace
- process.env['GITHUB_WORKSPACE'] = root
-
- await fs.mkdir(root, {recursive: true})
- const cache = path.join(root, '.cache')
- await fs.mkdir(cache, {recursive: true})
- await fs.writeFile(path.join(cache, 'file.txt'), 'cached')
-
- const originalCwd = process.cwd()
-
- try {
- process.chdir(root)
-
- const resolvedPath = await cacheUtils.resolvePaths([filePath])
-
- const expectedPath = [filePath]
- expect(resolvedPath).toStrictEqual(expectedPath)
- } finally {
- process.chdir(originalCwd)
- }
-})
-
-test('resolvePaths with ~ in path', async () => {
- const cacheDir = uuidV4()
- const filePath = `~/${cacheDir}`
- // Create the following layout:
- // ~/uuid
- // ~/uuid/file.txt
-
- const homedir = jest.requireActual('os').homedir()
- const homedirMock = jest.spyOn(os, 'homedir')
- homedirMock.mockReturnValue(homedir)
-
- const target = path.join(homedir, cacheDir)
- await fs.mkdir(target, {recursive: true})
- await fs.writeFile(path.join(target, 'file.txt'), 'cached')
-
- const root = getTempDir()
- process.env['GITHUB_WORKSPACE'] = root
-
- try {
- const resolvedPath = await cacheUtils.resolvePaths([filePath])
-
- const expectedPath = [path.relative(root, target)]
- expect(resolvedPath).toStrictEqual(expectedPath)
- } finally {
- await io.rmRF(target)
- }
-})
-
-test('resolvePaths with home not found', async () => {
- const filePath = '~/.cache/yarn'
- const homedirMock = jest.spyOn(os, 'homedir')
- homedirMock.mockReturnValue('')
-
- await expect(cacheUtils.resolvePaths([filePath])).rejects.toThrow(
- 'Unable to determine HOME directory'
- )
-})
-
-test('resolvePaths inclusion pattern returns found', async () => {
- const pattern = '*.ts'
- // Create the following layout:
- // inclusion-patterns
- // inclusion-patterns/miss.txt
- // inclusion-patterns/test.ts
-
- const root = path.join(getTempDir(), 'inclusion-patterns')
- // tarball entries will be relative to workspace
- process.env['GITHUB_WORKSPACE'] = root
-
- await fs.mkdir(root, {recursive: true})
- await fs.writeFile(path.join(root, 'miss.txt'), 'no match')
- await fs.writeFile(path.join(root, 'test.ts'), 'match')
-
- const originalCwd = process.cwd()
-
- try {
- process.chdir(root)
-
- const resolvedPath = await cacheUtils.resolvePaths([pattern])
-
- const expectedPath = ['test.ts']
- expect(resolvedPath).toStrictEqual(expectedPath)
- } finally {
- process.chdir(originalCwd)
- }
-})
-
-test('resolvePaths exclusion pattern returns not found', async () => {
- const patterns = ['*.ts', '!test.ts']
- // Create the following layout:
- // exclusion-patterns
- // exclusion-patterns/miss.txt
- // exclusion-patterns/test.ts
-
- const root = path.join(getTempDir(), 'exclusion-patterns')
- // tarball entries will be relative to workspace
- process.env['GITHUB_WORKSPACE'] = root
-
- await fs.mkdir(root, {recursive: true})
- await fs.writeFile(path.join(root, 'miss.txt'), 'no match')
- await fs.writeFile(path.join(root, 'test.ts'), 'no match')
-
- const originalCwd = process.cwd()
-
- try {
- process.chdir(root)
-
- const resolvedPath = await cacheUtils.resolvePaths(patterns)
-
- const expectedPath: string[] = []
- expect(resolvedPath).toStrictEqual(expectedPath)
- } finally {
- process.chdir(originalCwd)
- }
-})
-
test('unlinkFile unlinks file', async () => {
const testDirectory = await fs.mkdtemp('unlinkFileTest')
const testFile = path.join(testDirectory, 'test.txt')
diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts
index d1f016d6..5c807fcf 100644
--- a/packages/cache/__tests__/restoreCache.test.ts
+++ b/packages/cache/__tests__/restoreCache.test.ts
@@ -20,112 +20,95 @@ beforeAll(() => {
})
test('restore with no path should fail', async () => {
- const inputPath = ''
+ const paths: string[] = []
const key = 'node-test'
- const failedMock = jest.spyOn(core, 'setFailed')
- await restoreCache(inputPath, key)
- expect(failedMock).toHaveBeenCalledWith(
- 'Input required and not supplied: path'
+ await expect(restoreCache(paths, key)).rejects.toThrowError(
+ `Path Validation Error: At least one directory or file path is required`
)
})
test('restore with too many keys should fail', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = [...Array(20).keys()].map(x => x.toString())
- const failedMock = jest.spyOn(core, 'setFailed')
- await restoreCache(inputPath, key, restoreKeys)
- expect(failedMock).toHaveBeenCalledWith(
+ await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
})
test('restore with large key should fail', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'foo'.repeat(512) // Over the 512 character limit
- const failedMock = jest.spyOn(core, 'setFailed')
- await restoreCache(inputPath, key)
- expect(failedMock).toHaveBeenCalledWith(
+ await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
)
})
test('restore with invalid key should fail', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'comma,comma'
- const failedMock = jest.spyOn(core, 'setFailed')
- await restoreCache(inputPath, key)
- expect(failedMock).toHaveBeenCalledWith(
+ await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot contain commas.`
)
})
test('restore with no cache found', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
const infoMock = jest.spyOn(core, 'info')
- const failedMock = jest.spyOn(core, 'setFailed')
-
const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
clientMock.mockImplementation(async () => {
return Promise.resolve(null)
})
- await restoreCache(inputPath, key)
+ const cacheKey = await restoreCache(paths, key)
- expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(cacheKey).toBe(undefined)
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}`
)
})
test('restore with server error should fail', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
- const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
- const failedMock = jest.spyOn(core, 'setFailed')
-
const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
clientMock.mockImplementation(() => {
throw new Error('HTTP Error Occurred')
})
- await restoreCache(inputPath, key)
-
- expect(logWarningMock).toHaveBeenCalledTimes(1)
- expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred')
- expect(failedMock).toHaveBeenCalledTimes(0)
+ await expect(restoreCache(paths, key)).rejects.toThrowError(
+ 'HTTP Error Occurred'
+ )
})
test('restore with restore keys and no cache found', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
const restoreKey = 'node-'
const infoMock = jest.spyOn(core, 'info')
- const failedMock = jest.spyOn(core, 'setFailed')
const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
clientMock.mockImplementation(async () => {
return Promise.resolve(null)
})
- await restoreCache(inputPath, key, [restoreKey])
+ const cacheKey = await restoreCache(paths, key, [restoreKey])
- expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(cacheKey).toBe(undefined)
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}, ${restoreKey}`
)
})
test('restore with gzip compressed cache found', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
const infoMock = jest.spyOn(core, 'info')
- const failedMock = jest.spyOn(core, 'setFailed')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
@@ -160,9 +143,10 @@ test('restore with gzip compressed cache found', async () => {
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
- await restoreCache(inputPath, key)
+ const cacheKey = await restoreCache(paths, key)
- expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, {
+ expect(cacheKey).toBe(key)
+ expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -179,16 +163,14 @@ test('restore with gzip compressed cache found', async () => {
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with a pull request event and zstd compressed cache found', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
const infoMock = jest.spyOn(core, 'info')
- const failedMock = jest.spyOn(core, 'setFailed')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
@@ -220,9 +202,10 @@ test('restore with a pull request event and zstd compressed cache found', async
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
- await restoreCache(inputPath, key)
+ const cacheKey = await restoreCache(paths, key)
- expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, {
+ expect(cacheKey).toBe(key)
+ expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -237,17 +220,15 @@ test('restore with a pull request event and zstd compressed cache found', async
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with cache found for restore key', async () => {
- const inputPath = 'node_modules'
+ const paths = ['node_modules']
const key = 'node-test'
const restoreKey = 'node-'
const infoMock = jest.spyOn(core, 'info')
- const failedMock = jest.spyOn(core, 'setFailed')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: restoreKey,
@@ -279,9 +260,10 @@ test('restore with cache found for restore key', async () => {
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
- await restoreCache(inputPath, key, [restoreKey])
+ const cacheKey = await restoreCache(paths, key, [restoreKey])
- expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], inputPath, {
+ expect(cacheKey).toBe(restoreKey)
+ expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, {
compressionMethod: compression
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -298,6 +280,5 @@ test('restore with cache found for restore key', async () => {
expect(infoMock).toHaveBeenCalledWith(
`Cache restored from key: ${restoreKey}`
)
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts
index f1346634..2339a197 100644
--- a/packages/cache/__tests__/saveCache.test.ts
+++ b/packages/cache/__tests__/saveCache.test.ts
@@ -1,4 +1,3 @@
-import * as core from '@actions/core'
import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
@@ -27,42 +26,31 @@ beforeAll(() => {
})
})
-test('save with missing input outputs warning', async () => {
- const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
- const failedMock = jest.spyOn(core, 'setFailed')
-
- const inputPath = ''
+test('save with missing input should fail', async () => {
+ const paths: string[] = []
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-
- await saveCache(inputPath, primaryKey)
-
- expect(logWarningMock).toHaveBeenCalledWith(
- 'Input required and not supplied: path'
+ await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
+ `Path Validation Error: At least one directory or file path is required`
)
- expect(logWarningMock).toHaveBeenCalledTimes(1)
- expect(failedMock).toHaveBeenCalledTimes(0)
})
-test('save with large cache outputs warning', async () => {
- const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
- const failedMock = jest.spyOn(core, 'setFailed')
-
- const inputPath = 'node_modules'
+test('save with large cache outputs should fail', async () => {
+ const filePath = 'node_modules'
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
- const cachePaths = [path.resolve(inputPath)]
+ const cachePaths = [path.resolve(filePath)]
const createTarMock = jest.spyOn(tar, 'createTar')
const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
- jest.spyOn(cacheUtils, 'getArchiveFileSize').mockImplementationOnce(() => {
- return cacheSize
- })
+ jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValue(cacheSize)
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
- .mockReturnValue(Promise.resolve(compression))
+ .mockReturnValueOnce(Promise.resolve(compression))
- await saveCache(inputPath, primaryKey)
+ await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
+ 'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.'
+ )
const archiveFolder = '/foo/bar'
@@ -72,20 +60,11 @@ test('save with large cache outputs warning', async () => {
cachePaths,
compression
)
- expect(logWarningMock).toHaveBeenCalledTimes(1)
- expect(logWarningMock).toHaveBeenCalledWith(
- 'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.'
- )
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
-test('save with reserve cache failure outputs warning', async () => {
- const infoMock = jest.spyOn(core, 'info')
- const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
- const failedMock = jest.spyOn(core, 'setFailed')
-
- const inputPath = 'node_modules'
+test('save with reserve cache failure should fail', async () => {
+ const paths = ['node_modules']
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const reserveCacheMock = jest
@@ -99,33 +78,24 @@ test('save with reserve cache failure outputs warning', async () => {
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
- .mockReturnValue(Promise.resolve(compression))
+ .mockReturnValueOnce(Promise.resolve(compression))
- await saveCache(inputPath, primaryKey)
-
- expect(reserveCacheMock).toHaveBeenCalledTimes(1)
- expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
- compressionMethod: compression
- })
-
- expect(infoMock).toHaveBeenCalledWith(
+ await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
)
-
+ expect(reserveCacheMock).toHaveBeenCalledTimes(1)
+ expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, {
+ compressionMethod: compression
+ })
expect(createTarMock).toHaveBeenCalledTimes(0)
expect(saveCacheMock).toHaveBeenCalledTimes(0)
- expect(logWarningMock).toHaveBeenCalledTimes(0)
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
-test('save with server error outputs warning', async () => {
- const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
- const failedMock = jest.spyOn(core, 'setFailed')
-
- const inputPath = 'node_modules'
+test('save with server error should fail', async () => {
+ const filePath = 'node_modules'
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
- const cachePaths = [path.resolve(inputPath)]
+ const cachePaths = [path.resolve(filePath)]
const cacheId = 4
const reserveCacheMock = jest
@@ -144,12 +114,13 @@ test('save with server error outputs warning', async () => {
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
- .mockReturnValue(Promise.resolve(compression))
-
- await saveCache(inputPath, primaryKey)
+ .mockReturnValueOnce(Promise.resolve(compression))
+  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
+ 'HTTP Error Occurred'
+ )
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
- expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+ expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
compressionMethod: compression
})
@@ -165,20 +136,13 @@ test('save with server error outputs warning', async () => {
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
-
- expect(logWarningMock).toHaveBeenCalledTimes(1)
- expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred')
-
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('save with valid inputs uploads a cache', async () => {
- const failedMock = jest.spyOn(core, 'setFailed')
-
- const inputPath = 'node_modules'
+ const filePath = 'node_modules'
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
- const cachePaths = [path.resolve(inputPath)]
+ const cachePaths = [path.resolve(filePath)]
const cacheId = 4
const reserveCacheMock = jest
@@ -194,10 +158,10 @@ test('save with valid inputs uploads a cache', async () => {
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
- await saveCache(inputPath, primaryKey)
+ await saveCache([filePath], primaryKey)
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
- expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+ expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
compressionMethod: compression
})
@@ -213,7 +177,5 @@ test('save with valid inputs uploads a cache', async () => {
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
-
- expect(failedMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 4aab741e..cf9be5eb 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -1,169 +1,153 @@
import * as core from '@actions/core'
-import * as pathUtils from 'path'
+import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar} from './internal/tar'
+function checkPaths(paths: string[]): void {
+ if (!paths || paths.length === 0) {
+ throw new Error(
+ `Path Validation Error: At least one directory or file path is required`
+ )
+ }
+}
+
+function checkKey(key: string): void {
+ if (key.length > 512) {
+ throw new Error(
+ `Key Validation Error: ${key} cannot be larger than 512 characters.`
+ )
+ }
+ const regex = /^[^,]*$/
+ if (!regex.test(key)) {
+ throw new Error(`Key Validation Error: ${key} cannot contain commas.`)
+ }
+}
+
/**
* Restores cache from keys
*
- * @param path a string representing files that were cached
+ * @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @returns string returns the key for the cache hit, otherwise return undefined
*/
export async function restoreCache(
- path: string,
+ paths: string[],
primaryKey: string,
restoreKeys?: string[]
): Promise<string | undefined> {
- try {
- if (!path || path.length === 0) {
- throw new Error('Input required and not supplied: path')
- }
+ checkPaths(paths)
- restoreKeys = restoreKeys || []
- const keys = [primaryKey, ...restoreKeys]
+ restoreKeys = restoreKeys || []
+ const keys = [primaryKey, ...restoreKeys]
- core.debug('Resolved Keys:')
- core.debug(JSON.stringify(keys))
+ core.debug('Resolved Keys:')
+ core.debug(JSON.stringify(keys))
- if (keys.length > 10) {
- core.setFailed(
- `Key Validation Error: Keys are limited to a maximum of 10.`
- )
- return undefined
- }
- for (const key of keys) {
- if (key.length > 512) {
- core.setFailed(
- `Key Validation Error: ${key} cannot be larger than 512 characters.`
- )
- return undefined
- }
- const regex = /^[^,]*$/
- if (!regex.test(key)) {
- core.setFailed(`Key Validation Error: ${key} cannot contain commas.`)
- return undefined
- }
- }
+ if (keys.length > 10) {
+ throw new Error(
+ `Key Validation Error: Keys are limited to a maximum of 10.`
+ )
+ }
+ for (const key of keys) {
+ checkKey(key)
+ }
- const compressionMethod = await utils.getCompressionMethod()
+ const compressionMethod = await utils.getCompressionMethod()
- try {
- // path are needed to compute version
- const cacheEntry = await cacheHttpClient.getCacheEntry(keys, path, {
- compressionMethod
- })
- if (!cacheEntry?.archiveLocation) {
- core.info(`Cache not found for input keys: ${keys.join(', ')}`)
- return undefined
- }
-
- const archivePath = pathUtils.join(
- await utils.createTempDirectory(),
- utils.getCacheFileName(compressionMethod)
- )
- core.debug(`Archive Path: ${archivePath}`)
-
- try {
- // Download the cache from the cache entry
- await cacheHttpClient.downloadCache(
- cacheEntry.archiveLocation,
- archivePath
- )
-
- const archiveFileSize = utils.getArchiveFileSize(archivePath)
- core.info(
- `Cache Size: ~${Math.round(
- archiveFileSize / (1024 * 1024)
- )} MB (${archiveFileSize} B)`
- )
-
- await extractTar(archivePath, compressionMethod)
- } finally {
- // Try to delete the archive to save space
- try {
- await utils.unlinkFile(archivePath)
- } catch (error) {
- core.debug(`Failed to delete archive: ${error}`)
- }
- }
-
- core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
-
- return cacheEntry.cacheKey
- } catch (error) {
- utils.logWarning(error.message)
- return undefined
- }
- } catch (error) {
- core.setFailed(error.message)
+ // path are needed to compute version
+ const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+ compressionMethod
+ })
+ if (!cacheEntry?.archiveLocation) {
+ core.info(`Cache not found for input keys: ${keys.join(', ')}`)
return undefined
}
+
+ const archivePath = path.join(
+ await utils.createTempDirectory(),
+ utils.getCacheFileName(compressionMethod)
+ )
+ core.debug(`Archive Path: ${archivePath}`)
+
+ try {
+ // Download the cache from the cache entry
+ await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)
+
+ const archiveFileSize = utils.getArchiveFileSize(archivePath)
+ core.info(
+ `Cache Size: ~${Math.round(
+ archiveFileSize / (1024 * 1024)
+ )} MB (${archiveFileSize} B)`
+ )
+
+ await extractTar(archivePath, compressionMethod)
+ } finally {
+ // Try to delete the archive to save space
+ try {
+ await utils.unlinkFile(archivePath)
+ } catch (error) {
+ core.debug(`Failed to delete archive: ${error}`)
+ }
+ }
+
+ core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
+
+ return cacheEntry.cacheKey
}
/**
- * Saves a file with the specified key
+ * Saves a list of files with the specified key
*
- * @param path a string representing files to be cached
+ * @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
- * @returns number returns cacheId if the cache was saved successfully, otherwise return -1
+ * @returns number returns cacheId if the cache was saved successfully
*/
-export async function saveCache(path: string, key: string): Promise<number> {
- try {
- if (!path || path.length === 0) {
- throw new Error('Input required and not supplied: path')
- }
+export async function saveCache(paths: string[], key: string): Promise<number> {
+ checkPaths(paths)
+ checkKey(key)
- const compressionMethod = await utils.getCompressionMethod()
+ const compressionMethod = await utils.getCompressionMethod()
- core.debug('Reserving Cache')
- const cacheId = await cacheHttpClient.reserveCache(key, path, {
- compressionMethod
- })
- if (cacheId === -1) {
- core.info(
- `Unable to reserve cache with key ${key}, another job may be creating this cache.`
- )
- return -1
- }
- core.debug(`Cache ID: ${cacheId}`)
- const cachePaths = await utils.resolvePaths(
- path.split('\n').filter(x => x !== '')
+ core.debug('Reserving Cache')
+ const cacheId = await cacheHttpClient.reserveCache(key, paths, {
+ compressionMethod
+ })
+ if (cacheId === -1) {
+ throw new Error(
+ `Unable to reserve cache with key ${key}, another job may be creating this cache.`
)
-
- core.debug('Cache Paths:')
- core.debug(`${JSON.stringify(cachePaths)}`)
-
- const archiveFolder = await utils.createTempDirectory()
- const archivePath = pathUtils.join(
- archiveFolder,
- utils.getCacheFileName(compressionMethod)
- )
-
- core.debug(`Archive Path: ${archivePath}`)
-
- await createTar(archiveFolder, cachePaths, compressionMethod)
-
- const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
- const archiveFileSize = utils.getArchiveFileSize(archivePath)
- core.debug(`File Size: ${archiveFileSize}`)
- if (archiveFileSize > fileSizeLimit) {
- utils.logWarning(
- `Cache size of ~${Math.round(
- archiveFileSize / (1024 * 1024)
- )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
- )
- return -1
- }
-
- core.debug(`Saving Cache (ID: ${cacheId})`)
- await cacheHttpClient.saveCache(cacheId, archivePath)
-
- return cacheId
- } catch (error) {
- utils.logWarning(error.message)
- return -1
}
+ core.debug(`Cache ID: ${cacheId}`)
+ const cachePaths = await utils.resolvePaths(paths)
+
+ core.debug('Cache Paths:')
+ core.debug(`${JSON.stringify(cachePaths)}`)
+
+ const archiveFolder = await utils.createTempDirectory()
+ const archivePath = path.join(
+ archiveFolder,
+ utils.getCacheFileName(compressionMethod)
+ )
+
+ core.debug(`Archive Path: ${archivePath}`)
+
+ await createTar(archiveFolder, cachePaths, compressionMethod)
+
+ const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
+ const archiveFileSize = utils.getArchiveFileSize(archivePath)
+ core.debug(`File Size: ${archiveFileSize}`)
+ if (archiveFileSize > fileSizeLimit) {
+ throw new Error(
+ `Cache size of ~${Math.round(
+ archiveFileSize / (1024 * 1024)
+ )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
+ )
+ }
+
+ core.debug(`Saving Cache (ID: ${cacheId})`)
+ await cacheHttpClient.saveCache(cacheId, archivePath)
+
+ return cacheId
}
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index 92e9498a..f4f8c4d1 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -84,10 +84,10 @@ function createHttpClient(): HttpClient {
}
export function getCacheVersion(
- inputPath: string,
+ paths: string[],
compressionMethod?: CompressionMethod
): string {
- const components = [inputPath].concat(
+ const components = paths.concat(
compressionMethod === CompressionMethod.Zstd ? [compressionMethod] : []
)
@@ -102,11 +102,11 @@ export function getCacheVersion(
export async function getCacheEntry(
keys: string[],
- inputPath: string,
+ paths: string[],
options?: CacheOptions
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient()
- const version = getCacheVersion(inputPath, options?.compressionMethod)
+ const version = getCacheVersion(paths, options?.compressionMethod)
const resource = `cache?keys=${encodeURIComponent(
keys.join(',')
)}&version=${version}`
@@ -177,11 +177,11 @@ export async function downloadCache(
// Reserve Cache
export async function reserveCache(
key: string,
- inputPath: string,
+ paths: string[],
options?: CacheOptions
): Promise<number> {
const httpClient = createHttpClient()
- const version = getCacheVersion(inputPath, options?.compressionMethod)
+ const version = getCacheVersion(paths, options?.compressionMethod)
const reserveCacheRequest: ReserveCacheRequest = {
key,
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
index 8cce071f..8743963a 100644
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -38,11 +38,6 @@ export function getArchiveFileSize(filePath: string): number {
return fs.statSync(filePath).size
}
-export function logWarning(message: string): void {
- const warningPrefix = '[warning]'
- core.info(`${warningPrefix}${message}`)
-}
-
export async function resolvePaths(patterns: string[]): Promise<string[]> {
const paths: string[] = []
const workspace = process.env['GITHUB_WORKSPACE'] ?? process.cwd()