diff --git a/README.md b/README.md
index 3dab6d39..8701ab5c 100644
--- a/README.md
+++ b/README.md
@@ -82,6 +82,15 @@ $ npm install @actions/artifact --save
```
+:dart: [@actions/cache](packages/cache)
+
+Provides functions to interact with the actions cache. Read more [here](packages/cache)
+
+```bash
+$ npm install @actions/cache --save
+```
+
+
## Creating an Action with the Toolkit
:question: [Choosing an action type](docs/action-types.md)
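For context, a minimal sketch of how a workflow might consume the new package, based on the `restoreCache`/`saveCache` signatures introduced in `packages/cache/src/cache.ts` below; the paths and keys here are hypothetical:

```ts
import {restoreCache, saveCache} from '@actions/cache'

async function run(): Promise<void> {
  // The path argument is a single string; saveCache splits it on newlines,
  // so multiple paths/globs can be joined like this.
  const paths = ['node_modules', '.cache'].join('\n') // hypothetical paths
  const key = `deps-${process.platform}-v1` // hypothetical primary key
  const restoreKeys = [`deps-${process.platform}-`]

  // Resolves to the matched key on a cache hit, otherwise undefined.
  const hitKey = await restoreCache(paths, key, restoreKeys)
  if (hitKey !== key) {
    // ...install dependencies here, then upload. Resolves to a cacheId,
    // or -1 if the cache could not be reserved or was too large.
    await saveCache(paths, key)
  }
}

void run()
```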
diff --git a/packages/cache/CONTRIBUTIONS.md b/packages/cache/CONTRIBUTIONS.md
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/cache/README.md b/packages/cache/README.md
new file mode 100644
index 00000000..b65c7f34
--- /dev/null
+++ b/packages/cache/README.md
@@ -0,0 +1 @@
+# `@actions/cache`
diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md
new file mode 100644
index 00000000..b47fc550
--- /dev/null
+++ b/packages/cache/RELEASES.md
@@ -0,0 +1,5 @@
+# @actions/cache Releases
+
+### 0.0.0
+
+- Initial release
\ No newline at end of file
diff --git a/packages/cache/__tests__/__fixtures__/helloWorld.txt b/packages/cache/__tests__/__fixtures__/helloWorld.txt
new file mode 100644
index 00000000..95d09f2b
--- /dev/null
+++ b/packages/cache/__tests__/__fixtures__/helloWorld.txt
@@ -0,0 +1 @@
+hello world
\ No newline at end of file
diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts
new file mode 100644
index 00000000..d2165280
--- /dev/null
+++ b/packages/cache/__tests__/cacheHttpClient.test.ts
@@ -0,0 +1,28 @@
+import {getCacheVersion} from '../src/internal/cacheHttpClient'
+import {CompressionMethod} from '../src/internal/constants'
+
+test('getCacheVersion with path input and compression method undefined returns version', async () => {
+ const inputPath = 'node_modules'
+ const result = getCacheVersion(inputPath)
+ expect(result).toEqual(
+ 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
+ )
+})
+
+test('getCacheVersion with zstd compression returns version', async () => {
+ const inputPath = 'node_modules'
+ const result = getCacheVersion(inputPath, CompressionMethod.Zstd)
+
+ expect(result).toEqual(
+ '273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24'
+ )
+})
+
+test('getCacheVersion with gzip compression does not change version', async () => {
+ const inputPath = 'node_modules'
+ const result = getCacheVersion(inputPath, CompressionMethod.Gzip)
+
+ expect(result).toEqual(
+ 'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
+ )
+})
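The expected hashes above follow directly from `getCacheVersion` (see `src/internal/cacheHttpClient.ts` later in this diff): the path, plus the compression method when it is zstd, plus the `'1.0'` version salt, joined with `|` and fed through SHA-256. A standalone sketch that reproduces them:

```ts
import * as crypto from 'crypto'

// Mirrors getCacheVersion: components + version salt, joined with '|'.
function version(components: string[]): string {
  return crypto
    .createHash('sha256')
    .update(components.concat('1.0').join('|'))
    .digest('hex')
}

console.log(version(['node_modules'])) // shared by gzip and undefined
console.log(version(['node_modules', 'zstd'])) // zstd gets its own version
```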
diff --git a/packages/cache/__tests__/cacheUtils.test.ts b/packages/cache/__tests__/cacheUtils.test.ts
new file mode 100644
index 00000000..b09eed13
--- /dev/null
+++ b/packages/cache/__tests__/cacheUtils.test.ts
@@ -0,0 +1,177 @@
+import * as core from '@actions/core'
+import * as io from '@actions/io'
+import {promises as fs} from 'fs'
+import * as os from 'os'
+import * as path from 'path'
+import {v4 as uuidV4} from 'uuid'
+import * as cacheUtils from '../src/internal/cacheUtils'
+
+jest.mock('@actions/core')
+jest.mock('os')
+
+function getTempDir(): string {
+ return path.join(__dirname, '_temp', 'cacheUtils')
+}
+
+afterAll(async () => {
+ delete process.env['GITHUB_WORKSPACE']
+ await io.rmRF(getTempDir())
+})
+
+test('getArchiveFileSize returns file size', () => {
+ const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')
+
+ const size = cacheUtils.getArchiveFileSize(filePath)
+
+ expect(size).toBe(11)
+})
+
+test('logWarning logs a message with a warning prefix', () => {
+ const message = 'A warning occurred.'
+
+ const infoMock = jest.spyOn(core, 'info')
+
+ cacheUtils.logWarning(message)
+
+ expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`)
+})
+
+test('resolvePaths with no ~ in path', async () => {
+ const filePath = '.cache'
+
+ // Create the following layout:
+ // cwd
+ // cwd/.cache
+ // cwd/.cache/file.txt
+
+ const root = path.join(getTempDir(), 'no-tilde')
+ // tarball entries will be relative to workspace
+ process.env['GITHUB_WORKSPACE'] = root
+
+ await fs.mkdir(root, {recursive: true})
+ const cache = path.join(root, '.cache')
+ await fs.mkdir(cache, {recursive: true})
+ await fs.writeFile(path.join(cache, 'file.txt'), 'cached')
+
+ const originalCwd = process.cwd()
+
+ try {
+ process.chdir(root)
+
+ const resolvedPath = await cacheUtils.resolvePaths([filePath])
+
+ const expectedPath = [filePath]
+ expect(resolvedPath).toStrictEqual(expectedPath)
+ } finally {
+ process.chdir(originalCwd)
+ }
+})
+
+test('resolvePaths with ~ in path', async () => {
+ const cacheDir = uuidV4()
+ const filePath = `~/${cacheDir}`
+ // Create the following layout:
+ // ~/uuid
+ // ~/uuid/file.txt
+
+ const homedir = jest.requireActual('os').homedir()
+ const homedirMock = jest.spyOn(os, 'homedir')
+ homedirMock.mockReturnValue(homedir)
+
+ const target = path.join(homedir, cacheDir)
+ await fs.mkdir(target, {recursive: true})
+ await fs.writeFile(path.join(target, 'file.txt'), 'cached')
+
+ const root = getTempDir()
+ process.env['GITHUB_WORKSPACE'] = root
+
+ try {
+ const resolvedPath = await cacheUtils.resolvePaths([filePath])
+
+ const expectedPath = [path.relative(root, target)]
+ expect(resolvedPath).toStrictEqual(expectedPath)
+ } finally {
+ await io.rmRF(target)
+ }
+})
+
+test('resolvePaths with home not found', async () => {
+ const filePath = '~/.cache/yarn'
+ const homedirMock = jest.spyOn(os, 'homedir')
+ homedirMock.mockReturnValue('')
+
+ await expect(cacheUtils.resolvePaths([filePath])).rejects.toThrow(
+ 'Unable to determine HOME directory'
+ )
+})
+
+test('resolvePaths inclusion pattern returns found', async () => {
+ const pattern = '*.ts'
+ // Create the following layout:
+ // inclusion-patterns
+ // inclusion-patterns/miss.txt
+ // inclusion-patterns/test.ts
+
+ const root = path.join(getTempDir(), 'inclusion-patterns')
+ // tarball entries will be relative to workspace
+ process.env['GITHUB_WORKSPACE'] = root
+
+ await fs.mkdir(root, {recursive: true})
+ await fs.writeFile(path.join(root, 'miss.txt'), 'no match')
+ await fs.writeFile(path.join(root, 'test.ts'), 'match')
+
+ const originalCwd = process.cwd()
+
+ try {
+ process.chdir(root)
+
+ const resolvedPath = await cacheUtils.resolvePaths([pattern])
+
+ const expectedPath = ['test.ts']
+ expect(resolvedPath).toStrictEqual(expectedPath)
+ } finally {
+ process.chdir(originalCwd)
+ }
+})
+
+test('resolvePaths exclusion pattern returns not found', async () => {
+ const patterns = ['*.ts', '!test.ts']
+ // Create the following layout:
+ // exclusion-patterns
+ // exclusion-patterns/miss.txt
+ // exclusion-patterns/test.ts
+
+ const root = path.join(getTempDir(), 'exclusion-patterns')
+ // tarball entries will be relative to workspace
+ process.env['GITHUB_WORKSPACE'] = root
+
+ await fs.mkdir(root, {recursive: true})
+ await fs.writeFile(path.join(root, 'miss.txt'), 'no match')
+ await fs.writeFile(path.join(root, 'test.ts'), 'no match')
+
+ const originalCwd = process.cwd()
+
+ try {
+ process.chdir(root)
+
+ const resolvedPath = await cacheUtils.resolvePaths(patterns)
+
+ const expectedPath: string[] = []
+ expect(resolvedPath).toStrictEqual(expectedPath)
+ } finally {
+ process.chdir(originalCwd)
+ }
+})
+
+test('unlinkFile unlinks file', async () => {
+ const testDirectory = await fs.mkdtemp('unlinkFileTest')
+ const testFile = path.join(testDirectory, 'test.txt')
+ await fs.writeFile(testFile, 'hello world')
+
+ await cacheUtils.unlinkFile(testFile)
+
+ // This should throw as testFile should not exist
+ await expect(fs.stat(testFile)).rejects.toThrow()
+
+ await fs.rmdir(testDirectory)
+})
diff --git a/packages/cache/__tests__/restoreCache.test.ts b/packages/cache/__tests__/restoreCache.test.ts
new file mode 100644
index 00000000..d1f016d6
--- /dev/null
+++ b/packages/cache/__tests__/restoreCache.test.ts
@@ -0,0 +1,303 @@
+import * as core from '@actions/core'
+import * as path from 'path'
+import {restoreCache} from '../src/cache'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
+import * as cacheUtils from '../src/internal/cacheUtils'
+import {CacheFilename, CompressionMethod} from '../src/internal/constants'
+import {ArtifactCacheEntry} from '../src/internal/contracts'
+import * as tar from '../src/internal/tar'
+
+jest.mock('../src/internal/cacheHttpClient')
+jest.mock('../src/internal/cacheUtils')
+jest.mock('../src/internal/tar')
+
+beforeAll(() => {
+ // eslint-disable-next-line @typescript-eslint/promise-function-async
+ jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
+ const actualUtils = jest.requireActual('../src/internal/cacheUtils')
+ return actualUtils.getCacheFileName(cm)
+ })
+})
+
+test('restore with no path should fail', async () => {
+ const inputPath = ''
+ const key = 'node-test'
+ const failedMock = jest.spyOn(core, 'setFailed')
+ await restoreCache(inputPath, key)
+ expect(failedMock).toHaveBeenCalledWith(
+ 'Input required and not supplied: path'
+ )
+})
+
+test('restore with too many keys should fail', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+ const restoreKeys = [...Array(20).keys()].map(x => x.toString())
+ const failedMock = jest.spyOn(core, 'setFailed')
+ await restoreCache(inputPath, key, restoreKeys)
+ expect(failedMock).toHaveBeenCalledWith(
+ `Key Validation Error: Keys are limited to a maximum of 10.`
+ )
+})
+
+test('restore with large key should fail', async () => {
+ const inputPath = 'node_modules'
+ const key = 'foo'.repeat(512) // Over the 512 character limit
+ const failedMock = jest.spyOn(core, 'setFailed')
+ await restoreCache(inputPath, key)
+ expect(failedMock).toHaveBeenCalledWith(
+ `Key Validation Error: ${key} cannot be larger than 512 characters.`
+ )
+})
+
+test('restore with invalid key should fail', async () => {
+ const inputPath = 'node_modules'
+ const key = 'comma,comma'
+ const failedMock = jest.spyOn(core, 'setFailed')
+ await restoreCache(inputPath, key)
+ expect(failedMock).toHaveBeenCalledWith(
+ `Key Validation Error: ${key} cannot contain commas.`
+ )
+})
+
+test('restore with no cache found', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+
+ const infoMock = jest.spyOn(core, 'info')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+ clientMock.mockImplementation(async () => {
+ return Promise.resolve(null)
+ })
+
+ await restoreCache(inputPath, key)
+
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(infoMock).toHaveBeenCalledWith(
+ `Cache not found for input keys: ${key}`
+ )
+})
+
+test('restore with server error should fail', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+
+ const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+ clientMock.mockImplementation(() => {
+ throw new Error('HTTP Error Occurred')
+ })
+
+ await restoreCache(inputPath, key)
+
+ expect(logWarningMock).toHaveBeenCalledTimes(1)
+ expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred')
+ expect(failedMock).toHaveBeenCalledTimes(0)
+})
+
+test('restore with restore keys and no cache found', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+ const restoreKey = 'node-'
+
+ const infoMock = jest.spyOn(core, 'info')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+ clientMock.mockImplementation(async () => {
+ return Promise.resolve(null)
+ })
+
+ await restoreCache(inputPath, key, [restoreKey])
+
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(infoMock).toHaveBeenCalledWith(
+ `Cache not found for input keys: ${key}, ${restoreKey}`
+ )
+})
+
+test('restore with gzip compressed cache found', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+
+ const infoMock = jest.spyOn(core, 'info')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const cacheEntry: ArtifactCacheEntry = {
+ cacheKey: key,
+ scope: 'refs/heads/master',
+ archiveLocation: 'www.actionscache.test/download'
+ }
+ const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+ getCacheMock.mockImplementation(async () => {
+ return Promise.resolve(cacheEntry)
+ })
+
+ const tempPath = '/foo/bar'
+
+ const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+ createTempDirectoryMock.mockImplementation(async () => {
+ return Promise.resolve(tempPath)
+ })
+
+ const archivePath = path.join(tempPath, CacheFilename.Gzip)
+ const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+
+ const fileSize = 142
+ const getArchiveFileSizeMock = jest
+ .spyOn(cacheUtils, 'getArchiveFileSize')
+ .mockReturnValue(fileSize)
+
+ const extractTarMock = jest.spyOn(tar, 'extractTar')
+ const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+
+ const compression = CompressionMethod.Gzip
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await restoreCache(inputPath, key)
+
+ expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, {
+ compressionMethod: compression
+ })
+ expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+ expect(downloadCacheMock).toHaveBeenCalledWith(
+ cacheEntry.archiveLocation,
+ archivePath
+ )
+ expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath)
+
+ expect(extractTarMock).toHaveBeenCalledTimes(1)
+ expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
+
+ expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+ expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+ expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('restore with zstd compressed cache found', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+
+ const infoMock = jest.spyOn(core, 'info')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const cacheEntry: ArtifactCacheEntry = {
+ cacheKey: key,
+ scope: 'refs/heads/master',
+ archiveLocation: 'www.actionscache.test/download'
+ }
+ const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+ getCacheMock.mockImplementation(async () => {
+ return Promise.resolve(cacheEntry)
+ })
+ const tempPath = '/foo/bar'
+
+ const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+ createTempDirectoryMock.mockImplementation(async () => {
+ return Promise.resolve(tempPath)
+ })
+
+ const archivePath = path.join(tempPath, CacheFilename.Zstd)
+ const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+
+ const fileSize = 62915000
+ const getArchiveFileSizeMock = jest
+ .spyOn(cacheUtils, 'getArchiveFileSize')
+ .mockReturnValue(fileSize)
+
+ const extractTarMock = jest.spyOn(tar, 'extractTar')
+ const compression = CompressionMethod.Zstd
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await restoreCache(inputPath, key)
+
+ expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, {
+ compressionMethod: compression
+ })
+ expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+ expect(downloadCacheMock).toHaveBeenCalledWith(
+ cacheEntry.archiveLocation,
+ archivePath
+ )
+ expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath)
+ expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
+
+ expect(extractTarMock).toHaveBeenCalledTimes(1)
+ expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
+
+ expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('restore with cache found for restore key', async () => {
+ const inputPath = 'node_modules'
+ const key = 'node-test'
+ const restoreKey = 'node-'
+
+ const infoMock = jest.spyOn(core, 'info')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const cacheEntry: ArtifactCacheEntry = {
+ cacheKey: restoreKey,
+ scope: 'refs/heads/master',
+ archiveLocation: 'www.actionscache.test/download'
+ }
+ const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
+ getCacheMock.mockImplementation(async () => {
+ return Promise.resolve(cacheEntry)
+ })
+ const tempPath = '/foo/bar'
+
+ const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+ createTempDirectoryMock.mockImplementation(async () => {
+ return Promise.resolve(tempPath)
+ })
+
+ const archivePath = path.join(tempPath, CacheFilename.Zstd)
+ const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
+
+ const fileSize = 142
+ const getArchiveFileSizeMock = jest
+ .spyOn(cacheUtils, 'getArchiveFileSize')
+ .mockReturnValue(fileSize)
+
+ const extractTarMock = jest.spyOn(tar, 'extractTar')
+ const compression = CompressionMethod.Zstd
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await restoreCache(inputPath, key, [restoreKey])
+
+ expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], inputPath, {
+ compressionMethod: compression
+ })
+ expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+ expect(downloadCacheMock).toHaveBeenCalledWith(
+ cacheEntry.archiveLocation,
+ archivePath
+ )
+ expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath)
+ expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
+
+ expect(extractTarMock).toHaveBeenCalledTimes(1)
+ expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
+
+ expect(infoMock).toHaveBeenCalledWith(
+ `Cache restored from key: ${restoreKey}`
+ )
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts
new file mode 100644
index 00000000..f1346634
--- /dev/null
+++ b/packages/cache/__tests__/saveCache.test.ts
@@ -0,0 +1,219 @@
+import * as core from '@actions/core'
+import * as path from 'path'
+import {saveCache} from '../src/cache'
+import * as cacheHttpClient from '../src/internal/cacheHttpClient'
+import * as cacheUtils from '../src/internal/cacheUtils'
+import {CacheFilename, CompressionMethod} from '../src/internal/constants'
+import * as tar from '../src/internal/tar'
+
+jest.mock('@actions/core')
+jest.mock('../src/internal/cacheHttpClient')
+jest.mock('../src/internal/cacheUtils')
+jest.mock('../src/internal/tar')
+
+beforeAll(() => {
+ // eslint-disable-next-line @typescript-eslint/promise-function-async
+ jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
+ const actualUtils = jest.requireActual('../src/internal/cacheUtils')
+ return actualUtils.getCacheFileName(cm)
+ })
+
+ jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => {
+ return filePaths.map(x => path.resolve(x))
+ })
+
+ jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => {
+ return Promise.resolve('/foo/bar')
+ })
+})
+
+test('save with missing input outputs warning', async () => {
+ const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const inputPath = ''
+ const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+
+ await saveCache(inputPath, primaryKey)
+
+ expect(logWarningMock).toHaveBeenCalledWith(
+ 'Input required and not supplied: path'
+ )
+ expect(logWarningMock).toHaveBeenCalledTimes(1)
+ expect(failedMock).toHaveBeenCalledTimes(0)
+})
+
+test('save with large cache outputs warning', async () => {
+ const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const inputPath = 'node_modules'
+ const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+ const cachePaths = [path.resolve(inputPath)]
+
+ const createTarMock = jest.spyOn(tar, 'createTar')
+
+ const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
+ jest.spyOn(cacheUtils, 'getArchiveFileSize').mockImplementationOnce(() => {
+ return cacheSize
+ })
+ const compression = CompressionMethod.Gzip
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await saveCache(inputPath, primaryKey)
+
+ const archiveFolder = '/foo/bar'
+
+ expect(createTarMock).toHaveBeenCalledTimes(1)
+ expect(createTarMock).toHaveBeenCalledWith(
+ archiveFolder,
+ cachePaths,
+ compression
+ )
+ expect(logWarningMock).toHaveBeenCalledTimes(1)
+ expect(logWarningMock).toHaveBeenCalledWith(
+ 'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.'
+ )
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('save with reserve cache failure outputs warning', async () => {
+ const infoMock = jest.spyOn(core, 'info')
+ const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const inputPath = 'node_modules'
+ const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+
+ const reserveCacheMock = jest
+ .spyOn(cacheHttpClient, 'reserveCache')
+ .mockImplementation(async () => {
+ return -1
+ })
+
+ const createTarMock = jest.spyOn(tar, 'createTar')
+ const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
+ const compression = CompressionMethod.Zstd
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await saveCache(inputPath, primaryKey)
+
+ expect(reserveCacheMock).toHaveBeenCalledTimes(1)
+ expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+ compressionMethod: compression
+ })
+
+ expect(infoMock).toHaveBeenCalledWith(
+ `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
+ )
+
+ expect(createTarMock).toHaveBeenCalledTimes(0)
+ expect(saveCacheMock).toHaveBeenCalledTimes(0)
+ expect(logWarningMock).toHaveBeenCalledTimes(0)
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('save with server error outputs warning', async () => {
+ const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const inputPath = 'node_modules'
+ const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+ const cachePaths = [path.resolve(inputPath)]
+
+ const cacheId = 4
+ const reserveCacheMock = jest
+ .spyOn(cacheHttpClient, 'reserveCache')
+ .mockImplementation(async () => {
+ return cacheId
+ })
+
+ const createTarMock = jest.spyOn(tar, 'createTar')
+
+ const saveCacheMock = jest
+ .spyOn(cacheHttpClient, 'saveCache')
+ .mockImplementationOnce(async () => {
+ throw new Error('HTTP Error Occurred')
+ })
+ const compression = CompressionMethod.Zstd
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await saveCache(inputPath, primaryKey)
+
+ expect(reserveCacheMock).toHaveBeenCalledTimes(1)
+ expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+ compressionMethod: compression
+ })
+
+ const archiveFolder = '/foo/bar'
+ const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
+
+ expect(createTarMock).toHaveBeenCalledTimes(1)
+ expect(createTarMock).toHaveBeenCalledWith(
+ archiveFolder,
+ cachePaths,
+ compression
+ )
+
+ expect(saveCacheMock).toHaveBeenCalledTimes(1)
+ expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+
+ expect(logWarningMock).toHaveBeenCalledTimes(1)
+ expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred')
+
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
+
+test('save with valid inputs uploads a cache', async () => {
+ const failedMock = jest.spyOn(core, 'setFailed')
+
+ const inputPath = 'node_modules'
+ const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
+ const cachePaths = [path.resolve(inputPath)]
+
+ const cacheId = 4
+ const reserveCacheMock = jest
+ .spyOn(cacheHttpClient, 'reserveCache')
+ .mockImplementation(async () => {
+ return cacheId
+ })
+ const createTarMock = jest.spyOn(tar, 'createTar')
+
+ const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
+ const compression = CompressionMethod.Zstd
+ const getCompressionMock = jest
+ .spyOn(cacheUtils, 'getCompressionMethod')
+ .mockReturnValue(Promise.resolve(compression))
+
+ await saveCache(inputPath, primaryKey)
+
+ expect(reserveCacheMock).toHaveBeenCalledTimes(1)
+ expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+ compressionMethod: compression
+ })
+
+ const archiveFolder = '/foo/bar'
+ const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
+
+ expect(createTarMock).toHaveBeenCalledTimes(1)
+ expect(createTarMock).toHaveBeenCalledWith(
+ archiveFolder,
+ cachePaths,
+ compression
+ )
+
+ expect(saveCacheMock).toHaveBeenCalledTimes(1)
+ expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+
+ expect(failedMock).toHaveBeenCalledTimes(0)
+ expect(getCompressionMock).toHaveBeenCalledTimes(1)
+})
diff --git a/packages/cache/__tests__/tar.test.ts b/packages/cache/__tests__/tar.test.ts
new file mode 100644
index 00000000..0aa6c784
--- /dev/null
+++ b/packages/cache/__tests__/tar.test.ts
@@ -0,0 +1,191 @@
+import * as exec from '@actions/exec'
+import * as io from '@actions/io'
+import * as path from 'path'
+import {CacheFilename, CompressionMethod} from '../src/internal/constants'
+import * as tar from '../src/internal/tar'
+import * as utils from '../src/internal/cacheUtils'
+// eslint-disable-next-line @typescript-eslint/no-require-imports
+import fs = require('fs')
+
+jest.mock('@actions/exec')
+jest.mock('@actions/io')
+
+const IS_WINDOWS = process.platform === 'win32'
+
+function getTempDir(): string {
+ return path.join(__dirname, '_temp', 'tar')
+}
+
+beforeAll(async () => {
+ jest.spyOn(io, 'which').mockImplementation(async tool => {
+ return tool
+ })
+
+ process.env['GITHUB_WORKSPACE'] = process.cwd()
+ await jest.requireActual('@actions/io').rmRF(getTempDir())
+})
+
+afterAll(async () => {
+ delete process.env['GITHUB_WORKSPACE']
+ await jest.requireActual('@actions/io').rmRF(getTempDir())
+})
+
+test('zstd extract tar', async () => {
+ const mkdirMock = jest.spyOn(io, 'mkdirP')
+ const execMock = jest.spyOn(exec, 'exec')
+
+ const archivePath = IS_WINDOWS
+ ? `${process.env['windir']}\\fakepath\\cache.tar`
+ : 'cache.tar'
+ const workspace = process.env['GITHUB_WORKSPACE']
+
+ await tar.extractTar(archivePath, CompressionMethod.Zstd)
+
+ expect(mkdirMock).toHaveBeenCalledWith(workspace)
+ const tarPath = IS_WINDOWS
+ ? `${process.env['windir']}\\System32\\tar.exe`
+ : 'tar'
+ expect(execMock).toHaveBeenCalledTimes(1)
+ expect(execMock).toHaveBeenCalledWith(
+ `"${tarPath}"`,
+ [
+ '--use-compress-program',
+ 'zstd -d --long=30',
+ '-xf',
+ IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
+ '-P',
+ '-C',
+ IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace
+ ],
+ {cwd: undefined}
+ )
+})
+
+test('gzip extract tar', async () => {
+ const mkdirMock = jest.spyOn(io, 'mkdirP')
+ const execMock = jest.spyOn(exec, 'exec')
+ const archivePath = IS_WINDOWS
+ ? `${process.env['windir']}\\fakepath\\cache.tar`
+ : 'cache.tar'
+ const workspace = process.env['GITHUB_WORKSPACE']
+
+ await tar.extractTar(archivePath, CompressionMethod.Gzip)
+
+ expect(mkdirMock).toHaveBeenCalledWith(workspace)
+ const tarPath = IS_WINDOWS
+ ? `${process.env['windir']}\\System32\\tar.exe`
+ : 'tar'
+ expect(execMock).toHaveBeenCalledTimes(1)
+ expect(execMock).toHaveBeenCalledWith(
+ `"${tarPath}"`,
+ [
+ '-z',
+ '-xf',
+ IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
+ '-P',
+ '-C',
+ IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace
+ ],
+ {cwd: undefined}
+ )
+})
+
+test('gzip extract GNU tar on windows', async () => {
+ if (IS_WINDOWS) {
+ jest.spyOn(fs, 'existsSync').mockReturnValueOnce(false)
+
+ const isGnuMock = jest
+ .spyOn(utils, 'useGnuTar')
+ .mockReturnValue(Promise.resolve(true))
+ const execMock = jest.spyOn(exec, 'exec')
+ const archivePath = `${process.env['windir']}\\fakepath\\cache.tar`
+ const workspace = process.env['GITHUB_WORKSPACE']
+
+ await tar.extractTar(archivePath, CompressionMethod.Gzip)
+
+ expect(isGnuMock).toHaveBeenCalledTimes(1)
+ expect(execMock).toHaveBeenCalledTimes(1)
+ expect(execMock).toHaveBeenCalledWith(
+ `"tar"`,
+ [
+ '-z',
+ '-xf',
+ archivePath.replace(/\\/g, '/'),
+ '-P',
+ '-C',
+ workspace?.replace(/\\/g, '/'),
+ '--force-local'
+ ],
+ {cwd: undefined}
+ )
+ }
+})
+
+test('zstd create tar', async () => {
+ const execMock = jest.spyOn(exec, 'exec')
+
+ const archiveFolder = getTempDir()
+ const workspace = process.env['GITHUB_WORKSPACE']
+ const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`]
+
+ await fs.promises.mkdir(archiveFolder, {recursive: true})
+
+ await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Zstd)
+
+ const tarPath = IS_WINDOWS
+ ? `${process.env['windir']}\\System32\\tar.exe`
+ : 'tar'
+
+ expect(execMock).toHaveBeenCalledTimes(1)
+ expect(execMock).toHaveBeenCalledWith(
+ `"${tarPath}"`,
+ [
+ '--use-compress-program',
+ 'zstd -T0 --long=30',
+ '-cf',
+ IS_WINDOWS ? CacheFilename.Zstd.replace(/\\/g, '/') : CacheFilename.Zstd,
+ '-P',
+ '-C',
+ IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace,
+ '--files-from',
+ 'manifest.txt'
+ ],
+ {
+ cwd: archiveFolder
+ }
+ )
+})
+
+test('gzip create tar', async () => {
+ const execMock = jest.spyOn(exec, 'exec')
+
+ const archiveFolder = getTempDir()
+ const workspace = process.env['GITHUB_WORKSPACE']
+ const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`]
+
+ await fs.promises.mkdir(archiveFolder, {recursive: true})
+
+ await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Gzip)
+
+ const tarPath = IS_WINDOWS
+ ? `${process.env['windir']}\\System32\\tar.exe`
+ : 'tar'
+
+ expect(execMock).toHaveBeenCalledTimes(1)
+ expect(execMock).toHaveBeenCalledWith(
+ `"${tarPath}"`,
+ [
+ '-z',
+ '-cf',
+ IS_WINDOWS ? CacheFilename.Gzip.replace(/\\/g, '/') : CacheFilename.Gzip,
+ '-P',
+ '-C',
+ IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace,
+ '--files-from',
+ 'manifest.txt'
+ ],
+ {
+ cwd: archiveFolder
+ }
+ )
+})
diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json
new file mode 100644
index 00000000..4a263beb
--- /dev/null
+++ b/packages/cache/package-lock.json
@@ -0,0 +1,32 @@
+{
+ "name": "@actions/cache",
+ "version": "0.0.0",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "@actions/http-client": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz",
+ "integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==",
+ "requires": {
+ "tunnel": "0.0.6"
+ }
+ },
+ "@types/uuid": {
+ "version": "3.4.9",
+ "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.9.tgz",
+ "integrity": "sha512-XDwyIlt/47l2kWLTzw/mtrpLdB+GPSskR2n/PIcPn+VYhVO77rGhRncIR5GPU0KRzXuqkDO+J5qqrG0Y8P6jzQ==",
+ "dev": true
+ },
+ "tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
+ },
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ }
+ }
+}
diff --git a/packages/cache/package.json b/packages/cache/package.json
new file mode 100644
index 00000000..1454048c
--- /dev/null
+++ b/packages/cache/package.json
@@ -0,0 +1,49 @@
+{
+ "name": "@actions/cache",
+ "version": "0.0.0",
+ "preview": true,
+ "description": "Actions artifact cache lib",
+ "keywords": [
+ "github",
+ "actions",
+ "cache"
+ ],
+ "homepage": "https://github.com/actions/toolkit/tree/master/packages/cache",
+ "license": "MIT",
+ "main": "lib/cache.js",
+ "types": "lib/cache.d.ts",
+ "directories": {
+ "lib": "lib",
+ "test": "__tests__"
+ },
+ "files": [
+ "lib"
+ ],
+ "publishConfig": {
+ "access": "public"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/actions/toolkit.git",
+ "directory": "packages/cache"
+ },
+ "scripts": {
+ "audit-moderate": "npm install && npm audit --audit-level=moderate",
+ "test": "echo \"Error: run tests from root\" && exit 1",
+ "tsc": "tsc"
+ },
+ "bugs": {
+ "url": "https://github.com/actions/toolkit/issues"
+ },
+ "dependencies": {
+ "@actions/core": "^1.2.4",
+ "@actions/exec": "^1.0.1",
+ "@actions/glob": "^0.1.0",
+ "@actions/http-client": "^1.0.8",
+ "@actions/io": "^1.0.1",
+ "uuid": "^3.3.3"
+ },
+ "devDependencies": {
+ "@types/uuid": "^3.4.5"
+ }
+}
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
new file mode 100644
index 00000000..4aab741e
--- /dev/null
+++ b/packages/cache/src/cache.ts
@@ -0,0 +1,169 @@
+import * as core from '@actions/core'
+import * as pathUtils from 'path'
+import * as utils from './internal/cacheUtils'
+import * as cacheHttpClient from './internal/cacheHttpClient'
+import {createTar, extractTar} from './internal/tar'
+
+/**
+ * Restores cache from keys
+ *
+ * @param path a string representing files that were cached
+ * @param primaryKey an explicit key for restoring the cache
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @returns string returns the key for the cache hit, otherwise return undefined
+ */
+export async function restoreCache(
+ path: string,
+ primaryKey: string,
+ restoreKeys?: string[]
+): Promise<string | undefined> {
+ try {
+ if (!path || path.length === 0) {
+ throw new Error('Input required and not supplied: path')
+ }
+
+ restoreKeys = restoreKeys || []
+ const keys = [primaryKey, ...restoreKeys]
+
+ core.debug('Resolved Keys:')
+ core.debug(JSON.stringify(keys))
+
+ if (keys.length > 10) {
+ core.setFailed(
+ `Key Validation Error: Keys are limited to a maximum of 10.`
+ )
+ return undefined
+ }
+ for (const key of keys) {
+ if (key.length > 512) {
+ core.setFailed(
+ `Key Validation Error: ${key} cannot be larger than 512 characters.`
+ )
+ return undefined
+ }
+ const regex = /^[^,]*$/
+ if (!regex.test(key)) {
+ core.setFailed(`Key Validation Error: ${key} cannot contain commas.`)
+ return undefined
+ }
+ }
+
+ const compressionMethod = await utils.getCompressionMethod()
+
+ try {
+      // paths are needed to compute version
+ const cacheEntry = await cacheHttpClient.getCacheEntry(keys, path, {
+ compressionMethod
+ })
+ if (!cacheEntry?.archiveLocation) {
+ core.info(`Cache not found for input keys: ${keys.join(', ')}`)
+ return undefined
+ }
+
+ const archivePath = pathUtils.join(
+ await utils.createTempDirectory(),
+ utils.getCacheFileName(compressionMethod)
+ )
+ core.debug(`Archive Path: ${archivePath}`)
+
+ try {
+ // Download the cache from the cache entry
+ await cacheHttpClient.downloadCache(
+ cacheEntry.archiveLocation,
+ archivePath
+ )
+
+ const archiveFileSize = utils.getArchiveFileSize(archivePath)
+ core.info(
+ `Cache Size: ~${Math.round(
+ archiveFileSize / (1024 * 1024)
+ )} MB (${archiveFileSize} B)`
+ )
+
+ await extractTar(archivePath, compressionMethod)
+ } finally {
+ // Try to delete the archive to save space
+ try {
+ await utils.unlinkFile(archivePath)
+ } catch (error) {
+ core.debug(`Failed to delete archive: ${error}`)
+ }
+ }
+
+ core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
+
+ return cacheEntry.cacheKey
+ } catch (error) {
+ utils.logWarning(error.message)
+ return undefined
+ }
+ } catch (error) {
+ core.setFailed(error.message)
+ return undefined
+ }
+}
+
+/**
+ * Saves a file with the specified key
+ *
+ * @param path a string representing files to be cached
+ * @param key an explicit key for restoring the cache
+ * @returns number returns cacheId if the cache was saved successfully, otherwise return -1
+ */
+export async function saveCache(path: string, key: string): Promise<number> {
+ try {
+ if (!path || path.length === 0) {
+ throw new Error('Input required and not supplied: path')
+ }
+
+ const compressionMethod = await utils.getCompressionMethod()
+
+ core.debug('Reserving Cache')
+ const cacheId = await cacheHttpClient.reserveCache(key, path, {
+ compressionMethod
+ })
+ if (cacheId === -1) {
+ core.info(
+ `Unable to reserve cache with key ${key}, another job may be creating this cache.`
+ )
+ return -1
+ }
+ core.debug(`Cache ID: ${cacheId}`)
+ const cachePaths = await utils.resolvePaths(
+ path.split('\n').filter(x => x !== '')
+ )
+
+ core.debug('Cache Paths:')
+ core.debug(`${JSON.stringify(cachePaths)}`)
+
+ const archiveFolder = await utils.createTempDirectory()
+ const archivePath = pathUtils.join(
+ archiveFolder,
+ utils.getCacheFileName(compressionMethod)
+ )
+
+ core.debug(`Archive Path: ${archivePath}`)
+
+ await createTar(archiveFolder, cachePaths, compressionMethod)
+
+ const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
+ const archiveFileSize = utils.getArchiveFileSize(archivePath)
+ core.debug(`File Size: ${archiveFileSize}`)
+ if (archiveFileSize > fileSizeLimit) {
+ utils.logWarning(
+ `Cache size of ~${Math.round(
+ archiveFileSize / (1024 * 1024)
+ )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
+ )
+ return -1
+ }
+
+ core.debug(`Saving Cache (ID: ${cacheId})`)
+ await cacheHttpClient.saveCache(cacheId, archivePath)
+
+ return cacheId
+ } catch (error) {
+ utils.logWarning(error.message)
+ return -1
+ }
+}
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
new file mode 100644
index 00000000..92e9498a
--- /dev/null
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -0,0 +1,339 @@
+import * as core from '@actions/core'
+import {HttpClient, HttpCodes} from '@actions/http-client'
+import {BearerCredentialHandler} from '@actions/http-client/auth'
+import {
+ IHttpClientResponse,
+ IRequestOptions,
+ ITypedResponse
+} from '@actions/http-client/interfaces'
+import * as crypto from 'crypto'
+import * as fs from 'fs'
+import * as stream from 'stream'
+import * as util from 'util'
+
+import * as utils from './cacheUtils'
+import {CompressionMethod, SocketTimeout} from './constants'
+import {
+ ArtifactCacheEntry,
+ CacheOptions,
+ CommitCacheRequest,
+ ReserveCacheRequest,
+ ReserveCacheResponse
+} from './contracts'
+
+const versionSalt = '1.0'
+
+function isSuccessStatusCode(statusCode?: number): boolean {
+ if (!statusCode) {
+ return false
+ }
+ return statusCode >= 200 && statusCode < 300
+}
+
+function isRetryableStatusCode(statusCode?: number): boolean {
+ if (!statusCode) {
+ return false
+ }
+ const retryableStatusCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+ ]
+ return retryableStatusCodes.includes(statusCode)
+}
+
+function getCacheApiUrl(resource: string): string {
+ // Ideally we just use ACTIONS_CACHE_URL
+ const baseUrl: string = (
+ process.env['ACTIONS_CACHE_URL'] ||
+ process.env['ACTIONS_RUNTIME_URL'] ||
+ ''
+ ).replace('pipelines', 'artifactcache')
+ if (!baseUrl) {
+ throw new Error('Cache Service Url not found, unable to restore cache.')
+ }
+
+ const url = `${baseUrl}_apis/artifactcache/${resource}`
+ core.debug(`Resource Url: ${url}`)
+ return url
+}
+
+function createAcceptHeader(type: string, apiVersion: string): string {
+ return `${type};api-version=${apiVersion}`
+}
+
+function getRequestOptions(): IRequestOptions {
+ const requestOptions: IRequestOptions = {
+ headers: {
+ Accept: createAcceptHeader('application/json', '6.0-preview.1')
+ }
+ }
+
+ return requestOptions
+}
+
+function createHttpClient(): HttpClient {
+ const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
+ const bearerCredentialHandler = new BearerCredentialHandler(token)
+
+ return new HttpClient(
+ 'actions/cache',
+ [bearerCredentialHandler],
+ getRequestOptions()
+ )
+}
+
+export function getCacheVersion(
+ inputPath: string,
+ compressionMethod?: CompressionMethod
+): string {
+ const components = [inputPath].concat(
+ compressionMethod === CompressionMethod.Zstd ? [compressionMethod] : []
+ )
+
+ // Add salt to cache version to support breaking changes in cache entry
+ components.push(versionSalt)
+
+ return crypto
+ .createHash('sha256')
+ .update(components.join('|'))
+ .digest('hex')
+}
+
+export async function getCacheEntry(
+ keys: string[],
+ inputPath: string,
+ options?: CacheOptions
+): Promise<ArtifactCacheEntry | null> {
+ const httpClient = createHttpClient()
+ const version = getCacheVersion(inputPath, options?.compressionMethod)
+ const resource = `cache?keys=${encodeURIComponent(
+ keys.join(',')
+ )}&version=${version}`
+
+  const response = await httpClient.getJson<ArtifactCacheEntry>(
+ getCacheApiUrl(resource)
+ )
+ if (response.statusCode === 204) {
+ return null
+ }
+ if (!isSuccessStatusCode(response.statusCode)) {
+ throw new Error(`Cache service responded with ${response.statusCode}`)
+ }
+
+ const cacheResult = response.result
+ const cacheDownloadUrl = cacheResult?.archiveLocation
+ if (!cacheDownloadUrl) {
+ throw new Error('Cache not found.')
+ }
+ core.setSecret(cacheDownloadUrl)
+ core.debug(`Cache Result:`)
+ core.debug(JSON.stringify(cacheResult))
+
+ return cacheResult
+}
+
+async function pipeResponseToStream(
+ response: IHttpClientResponse,
+ output: NodeJS.WritableStream
+): Promise<void> {
+ const pipeline = util.promisify(stream.pipeline)
+ await pipeline(response.message, output)
+}
+
+export async function downloadCache(
+ archiveLocation: string,
+ archivePath: string
+): Promise<void> {
+ const writableStream = fs.createWriteStream(archivePath)
+ const httpClient = new HttpClient('actions/cache')
+ const downloadResponse = await httpClient.get(archiveLocation)
+
+ // Abort download if no traffic received over the socket.
+ downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
+ downloadResponse.message.destroy()
+ core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`)
+ })
+
+ await pipeResponseToStream(downloadResponse, writableStream)
+
+ // Validate download size.
+ const contentLengthHeader = downloadResponse.message.headers['content-length']
+
+ if (contentLengthHeader) {
+ const expectedLength = parseInt(contentLengthHeader)
+ const actualLength = utils.getArchiveFileSize(archivePath)
+
+ if (actualLength !== expectedLength) {
+ throw new Error(
+ `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
+ )
+ }
+ } else {
+ core.debug('Unable to validate download, no Content-Length header')
+ }
+}
+
+// Reserve Cache
+export async function reserveCache(
+ key: string,
+ inputPath: string,
+ options?: CacheOptions
+): Promise<number> {
+ const httpClient = createHttpClient()
+ const version = getCacheVersion(inputPath, options?.compressionMethod)
+
+ const reserveCacheRequest: ReserveCacheRequest = {
+ key,
+ version
+ }
+  const response = await httpClient.postJson<ReserveCacheResponse>(
+ getCacheApiUrl('caches'),
+ reserveCacheRequest
+ )
+ return response?.result?.cacheId ?? -1
+}
+
+function getContentRange(start: number, end: number): string {
+  // Format: `bytes start-end/filesize`
+ // start and end are inclusive
+ // filesize can be *
+ // For a 200 byte chunk starting at byte 0:
+ // Content-Range: bytes 0-199/*
+ return `bytes ${start}-${end}/*`
+}
+
+async function uploadChunk(
+ httpClient: HttpClient,
+ resourceUrl: string,
+ data: NodeJS.ReadableStream,
+ start: number,
+ end: number
+): Promise<void> {
+ core.debug(
+ `Uploading chunk of size ${end -
+ start +
+ 1} bytes at offset ${start} with content range: ${getContentRange(
+ start,
+ end
+ )}`
+ )
+ const additionalHeaders = {
+ 'Content-Type': 'application/octet-stream',
+ 'Content-Range': getContentRange(start, end)
+ }
+
+  const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
+ return await httpClient.sendStream(
+ 'PATCH',
+ resourceUrl,
+ data,
+ additionalHeaders
+ )
+ }
+
+ const response = await uploadChunkRequest()
+ if (isSuccessStatusCode(response.message.statusCode)) {
+ return
+ }
+
+ if (isRetryableStatusCode(response.message.statusCode)) {
+ core.debug(
+ `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
+ )
+ const retryResponse = await uploadChunkRequest()
+ if (isSuccessStatusCode(retryResponse.message.statusCode)) {
+ return
+ }
+ }
+
+ throw new Error(
+ `Cache service responded with ${response.message.statusCode} during chunk upload.`
+ )
+}
+
+function parseEnvNumber(key: string): number | undefined {
+ const value = Number(process.env[key])
+ if (Number.isNaN(value) || value < 0) {
+ return undefined
+ }
+ return value
+}
+
+async function uploadFile(
+ httpClient: HttpClient,
+ cacheId: number,
+ archivePath: string
+): Promise<void> {
+ // Upload Chunks
+ const fileSize = fs.statSync(archivePath).size
+ const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
+ const fd = fs.openSync(archivePath, 'r')
+
+ const concurrency = parseEnvNumber('CACHE_UPLOAD_CONCURRENCY') ?? 4 // # of HTTP requests in parallel
+ const MAX_CHUNK_SIZE =
+ parseEnvNumber('CACHE_UPLOAD_CHUNK_SIZE') ?? 32 * 1024 * 1024 // 32 MB Chunks
+ core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`)
+
+ const parallelUploads = [...new Array(concurrency).keys()]
+ core.debug('Awaiting all uploads')
+ let offset = 0
+
+ try {
+ await Promise.all(
+ parallelUploads.map(async () => {
+ while (offset < fileSize) {
+ const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE)
+ const start = offset
+ const end = offset + chunkSize - 1
+ offset += MAX_CHUNK_SIZE
+ const chunk = fs.createReadStream(archivePath, {
+ fd,
+ start,
+ end,
+ autoClose: false
+ })
+
+ await uploadChunk(httpClient, resourceUrl, chunk, start, end)
+ }
+ })
+ )
+ } finally {
+ fs.closeSync(fd)
+ }
+ return
+}
+
+async function commitCache(
+ httpClient: HttpClient,
+ cacheId: number,
+ filesize: number
+): Promise<ITypedResponse<null>> {
+ const commitCacheRequest: CommitCacheRequest = {size: filesize}
+  return await httpClient.postJson<null>(
+ getCacheApiUrl(`caches/${cacheId.toString()}`),
+ commitCacheRequest
+ )
+}
+
+export async function saveCache(
+ cacheId: number,
+ archivePath: string
+): Promise<void> {
+ const httpClient = createHttpClient()
+
+ core.debug('Upload cache')
+ await uploadFile(httpClient, cacheId, archivePath)
+
+ // Commit Cache
+  core.debug('Committing cache')
+ const cacheSize = utils.getArchiveFileSize(archivePath)
+ const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
+ if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+ throw new Error(
+ `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+ )
+ }
+
+ core.info('Cache saved successfully')
+}
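To make the upload loop concrete, here is a small standalone sketch of the ranges `uploadFile` produces with the default 32 MB chunk size; the 70 MB file size is an arbitrary example:

```ts
const MAX_CHUNK_SIZE = 32 * 1024 * 1024
const fileSize = 70 * 1024 * 1024 // arbitrary example size

for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
  const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE)
  const start = offset
  const end = offset + chunkSize - 1 // inclusive, as getContentRange expects
  // The total is '*' because the size is only committed after all chunks.
  console.log(`Content-Range: bytes ${start}-${end}/*`)
}
// Prints: bytes 0-33554431/*, bytes 33554432-67108863/*, bytes 67108864-73400319/*
```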
diff --git a/packages/cache/src/internal/cacheUtils.ts b/packages/cache/src/internal/cacheUtils.ts
new file mode 100644
index 00000000..8cce071f
--- /dev/null
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -0,0 +1,104 @@
+import * as core from '@actions/core'
+import * as exec from '@actions/exec'
+import * as glob from '@actions/glob'
+import * as io from '@actions/io'
+import * as fs from 'fs'
+import * as path from 'path'
+import * as util from 'util'
+import {v4 as uuidV4} from 'uuid'
+import {CacheFilename, CompressionMethod} from './constants'
+
+// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
+export async function createTempDirectory(): Promise<string> {
+ const IS_WINDOWS = process.platform === 'win32'
+
+ let tempDirectory: string = process.env['RUNNER_TEMP'] || ''
+
+ if (!tempDirectory) {
+ let baseLocation: string
+ if (IS_WINDOWS) {
+ // On Windows use the USERPROFILE env variable
+ baseLocation = process.env['USERPROFILE'] || 'C:\\'
+ } else {
+ if (process.platform === 'darwin') {
+ baseLocation = '/Users'
+ } else {
+ baseLocation = '/home'
+ }
+ }
+ tempDirectory = path.join(baseLocation, 'actions', 'temp')
+ }
+
+ const dest = path.join(tempDirectory, uuidV4())
+ await io.mkdirP(dest)
+ return dest
+}
+
+export function getArchiveFileSize(filePath: string): number {
+ return fs.statSync(filePath).size
+}
+
+export function logWarning(message: string): void {
+ const warningPrefix = '[warning]'
+ core.info(`${warningPrefix}${message}`)
+}
+
+export async function resolvePaths(patterns: string[]): Promise<string[]> {
+ const paths: string[] = []
+ const workspace = process.env['GITHUB_WORKSPACE'] ?? process.cwd()
+ const globber = await glob.create(patterns.join('\n'), {
+ implicitDescendants: false
+ })
+
+ for await (const file of globber.globGenerator()) {
+ const relativeFile = path.relative(workspace, file)
+ core.debug(`Matched: ${relativeFile}`)
+ // Paths are made relative so the tar entries are all relative to the root of the workspace.
+ paths.push(`${relativeFile}`)
+ }
+
+ return paths
+}
+
+export async function unlinkFile(filePath: fs.PathLike): Promise<void> {
+ return util.promisify(fs.unlink)(filePath)
+}
+
+async function getVersion(app: string): Promise<string> {
+ core.debug(`Checking ${app} --version`)
+ let versionOutput = ''
+ try {
+ await exec.exec(`${app} --version`, [], {
+ ignoreReturnCode: true,
+ silent: true,
+ listeners: {
+ stdout: (data: Buffer): string => (versionOutput += data.toString()),
+ stderr: (data: Buffer): string => (versionOutput += data.toString())
+ }
+ })
+ } catch (err) {
+ core.debug(err.message)
+ }
+
+ versionOutput = versionOutput.trim()
+ core.debug(versionOutput)
+ return versionOutput
+}
+
+export async function getCompressionMethod(): Promise<CompressionMethod> {
+ const versionOutput = await getVersion('zstd')
+ return versionOutput.toLowerCase().includes('zstd command line interface')
+ ? CompressionMethod.Zstd
+ : CompressionMethod.Gzip
+}
+
+export function getCacheFileName(compressionMethod: CompressionMethod): string {
+ return compressionMethod === CompressionMethod.Zstd
+ ? CacheFilename.Zstd
+ : CacheFilename.Gzip
+}
+
+export async function useGnuTar(): Promise<boolean> {
+ const versionOutput = await getVersion('tar')
+ return versionOutput.toLowerCase().includes('gnu tar')
+}
diff --git a/packages/cache/src/internal/constants.ts b/packages/cache/src/internal/constants.ts
new file mode 100644
index 00000000..b3d2a577
--- /dev/null
+++ b/packages/cache/src/internal/constants.ts
@@ -0,0 +1,14 @@
+export enum CacheFilename {
+ Gzip = 'cache.tgz',
+ Zstd = 'cache.tzst'
+}
+
+export enum CompressionMethod {
+ Gzip = 'gzip',
+ Zstd = 'zstd'
+}
+
+// Socket timeout in milliseconds during download. If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+export const SocketTimeout = 5000
diff --git a/packages/cache/src/internal/contracts.d.ts b/packages/cache/src/internal/contracts.d.ts
new file mode 100644
index 00000000..ca3f3620
--- /dev/null
+++ b/packages/cache/src/internal/contracts.d.ts
@@ -0,0 +1,25 @@
+import {CompressionMethod} from './constants'
+
+export interface ArtifactCacheEntry {
+ cacheKey?: string
+ scope?: string
+ creationTime?: string
+ archiveLocation?: string
+}
+
+export interface CommitCacheRequest {
+ size: number
+}
+
+export interface ReserveCacheRequest {
+ key: string
+ version?: string
+}
+
+export interface ReserveCacheResponse {
+ cacheId: number
+}
+
+export interface CacheOptions {
+ compressionMethod?: CompressionMethod
+}
diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts
new file mode 100644
index 00000000..221c7c70
--- /dev/null
+++ b/packages/cache/src/internal/tar.ts
@@ -0,0 +1,86 @@
+import {exec} from '@actions/exec'
+import * as io from '@actions/io'
+import {existsSync, writeFileSync} from 'fs'
+import * as path from 'path'
+import * as utils from './cacheUtils'
+import {CompressionMethod} from './constants'
+
+async function getTarPath(args: string[]): Promise<string> {
+ // Explicitly use BSD Tar on Windows
+ const IS_WINDOWS = process.platform === 'win32'
+ if (IS_WINDOWS) {
+ const systemTar = `${process.env['windir']}\\System32\\tar.exe`
+ if (existsSync(systemTar)) {
+ return systemTar
+ } else if (await utils.useGnuTar()) {
+ args.push('--force-local')
+ }
+ }
+ return await io.which('tar', true)
+}
+
+async function execTar(args: string[], cwd?: string): Promise<void> {
+ try {
+ await exec(`"${await getTarPath(args)}"`, args, {cwd})
+ } catch (error) {
+ throw new Error(`Tar failed with error: ${error?.message}`)
+ }
+}
+
+function getWorkingDirectory(): string {
+ return process.env['GITHUB_WORKSPACE'] ?? process.cwd()
+}
+
+export async function extractTar(
+ archivePath: string,
+ compressionMethod: CompressionMethod
+): Promise<void> {
+ // Create directory to extract tar into
+ const workingDirectory = getWorkingDirectory()
+ await io.mkdirP(workingDirectory)
+  // -d: Decompress.
+ // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+ // Using 30 here because we also support 32-bit self-hosted runners.
+ const args = [
+ ...(compressionMethod === CompressionMethod.Zstd
+ ? ['--use-compress-program', 'zstd -d --long=30']
+ : ['-z']),
+ '-xf',
+ archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+ '-P',
+ '-C',
+ workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+ ]
+ await execTar(args)
+}
+
+export async function createTar(
+ archiveFolder: string,
+ sourceDirectories: string[],
+ compressionMethod: CompressionMethod
+): Promise<void> {
+ // Write source directories to manifest.txt to avoid command length limits
+ const manifestFilename = 'manifest.txt'
+ const cacheFileName = utils.getCacheFileName(compressionMethod)
+ writeFileSync(
+ path.join(archiveFolder, manifestFilename),
+ sourceDirectories.join('\n')
+ )
+  // -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
+ // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+ // Using 30 here because we also support 32-bit self-hosted runners.
+ const workingDirectory = getWorkingDirectory()
+ const args = [
+ ...(compressionMethod === CompressionMethod.Zstd
+ ? ['--use-compress-program', 'zstd -T0 --long=30']
+ : ['-z']),
+ '-cf',
+ cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+ '-P',
+ '-C',
+ workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+ '--files-from',
+ manifestFilename
+ ]
+ await execTar(args, archiveFolder)
+}
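As a worked example (matching the expectations in `__tests__/tar.test.ts` above), these are the argv lists the helpers assemble for a zstd cache on a Linux runner; the workspace path is hypothetical:

```ts
// Hypothetical workspace; on Linux getTarPath resolves plain 'tar' from PATH.
const workspace = '/home/runner/work/repo/repo'

// createTar: runs with cwd set to the archive folder, reading entries
// from manifest.txt to avoid command-line length limits.
const create = [
  '--use-compress-program', 'zstd -T0 --long=30',
  '-cf', 'cache.tzst',
  '-P',
  '-C', workspace,
  '--files-from', 'manifest.txt'
]

// extractTar: the same --long window on the way back out; zstd will not
// decompress a window larger than its configured limit, so create and
// extract must agree on --long=30.
const extract = [
  '--use-compress-program', 'zstd -d --long=30',
  '-xf', '/tmp/cache.tzst',
  '-P',
  '-C', workspace
]

console.log(`tar ${create.join(' ')}`)
console.log(`tar ${extract.join(' ')}`)
```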
diff --git a/packages/cache/tsconfig.json b/packages/cache/tsconfig.json
new file mode 100644
index 00000000..a8b812a6
--- /dev/null
+++ b/packages/cache/tsconfig.json
@@ -0,0 +1,11 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "baseUrl": "./",
+ "outDir": "./lib",
+ "rootDir": "./src"
+ },
+ "include": [
+ "./src"
+ ]
+}
\ No newline at end of file