
Merge pull request #448 from actions/users/aiyan/cache-package

Initial commit to create @actions/cache package
Aiqiao Yan 2020-05-15 13:41:32 -04:00 committed by GitHub
commit a67b91ea15
26 changed files with 2025 additions and 4 deletions

.github/workflows/artifact-tests.yml

@@ -72,8 +72,8 @@ jobs:
       - name: Verify downloadArtifact()
         shell: bash
         run: |
-          scripts/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-          scripts/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+          packages/artifact/__tests__/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+          packages/artifact/__tests__/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
       - name: Download artifacts using downloadAllArtifacts()
         run: |
@@ -83,5 +83,5 @@ jobs:
       - name: Verify downloadAllArtifacts()
         shell: bash
         run: |
-          scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-          scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+          packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+          packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"

.github/workflows/cache-tests.yml vendored Normal file

@@ -0,0 +1,69 @@
name: cache-unit-tests
on:
  push:
    branches:
      - master
    paths-ignore:
      - '**.md'
  pull_request:
    paths-ignore:
      - '**.md'
jobs:
  build:
    name: Build
    strategy:
      matrix:
        runs-on: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.runs-on }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      # Saving and restoring a cache from a shell script requires certain env variables
      # that are only available in the node context. This runs a local action that reads
      # and re-exports the necessary env variables.
      - name: Set env variables
        uses: ./packages/cache/__tests__/__fixtures__/
      # Root node_modules are needed because npm packages like jest are configured for the
      # entire repository; without them the cache package alone cannot be compiled.
      - name: Install root npm packages
        run: npm ci
      - name: Compile cache package
        run: |
          npm ci
          npm run tsc
        working-directory: packages/cache
      - name: Generate files in working directory
        shell: bash
        run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
      - name: Generate files outside working directory
        shell: bash
        run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
      # We're using node -e to call the functions directly available in the @actions/cache package
      - name: Save cache using saveCache()
        run: |
          node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
      - name: Restore cache using restoreCache()
        run: |
          node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
      - name: Verify cache
        shell: bash
        run: |
          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache

README.md

@@ -59,6 +59,8 @@ $ npm install @actions/io
 Provides functions for downloading and caching tools. e.g. setup-* actions. Read more [here](packages/tool-cache)
+
+See @actions/cache for caching workflow dependencies.
 ```bash
 $ npm install @actions/tool-cache
 ```
@@ -82,6 +84,15 @@ $ npm install @actions/artifact
 ```
 <br/>
+
+:dart: [@actions/cache](packages/cache)
+
+Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)
+
+```bash
+$ npm install @actions/cache
+```
+<br/>
 ## Creating an Action with the Toolkit
 :question: [Choosing an action type](docs/action-types.md)

packages/cache/README.md vendored Normal file

@@ -0,0 +1,41 @@
# `@actions/cache`
> Functions necessary for caching dependencies and build outputs to improve workflow execution time.
See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows) for how caching works.
Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 5 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 5 GB.
## Usage
#### Restore Cache
Restores a cache based on `key` and `restoreKeys` to the `paths` provided. Returns the cache key on a cache hit, or `undefined` if no cache is found.
```js
const cache = require('@actions/cache');
const paths = [
'node_modules',
'packages/*/node_modules/'
]
const key = 'npm-foobar-d5ea0750'
const restoreKeys = [
'npm-foobar-',
'npm-'
]
const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
```
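
A cache miss is not an error: `restoreCache` resolves to `undefined` rather than throwing, so callers typically branch on the result. A minimal sketch reusing the hypothetical paths and keys from above:

```js
const cache = require('@actions/cache');

const cacheKey = await cache.restoreCache(
    ['node_modules', 'packages/*/node_modules/'],
    'npm-foobar-d5ea0750',
    ['npm-foobar-', 'npm-']
)
if (cacheKey === undefined) {
    // Neither the primary key nor any restore key matched; fall back to a cold install.
    console.log('Cache not found')
} else {
    console.log(`Cache restored from key: ${cacheKey}`)
}
```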
#### Save Cache
Saves a cache containing the files in `paths` using the `key` provided. The files are compressed with zstd if it is installed, otherwise with gzip. Returns the cache ID if the cache was saved successfully, and throws an error if the upload fails.
```js
const cache = require('@actions/cache');
const paths = [
'node_modules',
'packages/*/node_modules/'
]
const key = 'npm-foobar-d5ea0750'
const cacheId = await cache.saveCache(paths, key)
```
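
Unlike a restore miss, a failed save surfaces as an exception: `saveCache` throws a `ReserveCacheError` when another job is already creating the same entry, and a plain `Error` for size or upload failures. A minimal defensive sketch under those assumptions:

```js
const cache = require('@actions/cache');

try {
    const cacheId = await cache.saveCache(['node_modules'], 'npm-foobar-d5ea0750')
    console.log(`Cache saved with id: ${cacheId}`)
} catch (error) {
    // A ReserveCacheError here usually means a parallel job got to the key first.
    console.warn(`Cache save failed: ${error.message}`)
}
```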

packages/cache/RELEASES.md vendored Normal file

@@ -0,0 +1,5 @@
# @actions/cache Releases
### 0.1.0
- Initial release

packages/cache/__tests__/__fixtures__/action.yml vendored Normal file

@@ -0,0 +1,5 @@
name: 'Set env variables'
description: 'Sets certain env variables so that e2e restore and save cache can be tested in a shell'
runs:
  using: 'node12'
  main: 'index.js'

packages/cache/__tests__/__fixtures__/helloWorld.txt vendored Normal file

@@ -0,0 +1 @@
hello world

packages/cache/__tests__/__fixtures__/index.js vendored Normal file

@@ -0,0 +1,5 @@
// Certain env variables are not set by default in a shell context and are only available in a node context from a running action
// In order to be able to restore and save cache e2e in a shell when running CI tests, we need these env variables set
console.log(`::set-env name=ACTIONS_RUNTIME_URL::${process.env.ACTIONS_RUNTIME_URL}`)
console.log(`::set-env name=ACTIONS_RUNTIME_TOKEN::${process.env.ACTIONS_RUNTIME_TOKEN}`)
console.log(`::set-env name=GITHUB_RUN_ID::${process.env.GITHUB_RUN_ID}`)

packages/cache/__tests__/cacheHttpClient.test.ts vendored Normal file

@@ -0,0 +1,175 @@
import {getCacheVersion, retry} from '../src/internal/cacheHttpClient'
import {CompressionMethod} from '../src/internal/constants'
test('getCacheVersion with one path returns version', async () => {
const paths = ['node_modules']
const result = getCacheVersion(paths)
expect(result).toEqual(
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
)
})
test('getCacheVersion with multiple paths returns version', async () => {
const paths = ['node_modules', 'dist']
const result = getCacheVersion(paths)
expect(result).toEqual(
'165c3053bc646bf0d4fac17b1f5731caca6fe38e0e464715c0c3c6b6318bf436'
)
})
test('getCacheVersion with zstd compression returns version', async () => {
const paths = ['node_modules']
const result = getCacheVersion(paths, CompressionMethod.Zstd)
expect(result).toEqual(
'273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24'
)
})
test('getCacheVersion with gzip compression does not change version', async () => {
const paths = ['node_modules']
const result = getCacheVersion(paths, CompressionMethod.Gzip)
expect(result).toEqual(
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
)
})
interface TestResponse {
statusCode: number
result: string | null
}
async function handleResponse(
response: TestResponse | undefined
): Promise<TestResponse> {
if (!response) {
// eslint-disable-next-line no-undef
fail('Retry method called too many times')
}
if (response.statusCode === 999) {
throw Error('Test Error')
} else {
return Promise.resolve(response)
}
}
async function testRetryExpectingResult(
responses: TestResponse[],
expectedResult: string | null
): Promise<void> {
responses = responses.reverse() // Reverse responses since we pop from end
const actualResult = await retry(
'test',
async () => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
)
expect(actualResult.result).toEqual(expectedResult)
}
async function testRetryExpectingError(
responses: TestResponse[]
): Promise<void> {
responses = responses.reverse() // Reverse responses since we pop from end
expect(
retry(
'test',
async () => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
)
).rejects.toBeInstanceOf(Error)
}
test('retry works on successful response', async () => {
await testRetryExpectingResult(
[
{
statusCode: 200,
result: 'Ok'
}
],
'Ok'
)
})
test('retry works after retryable status code', async () => {
await testRetryExpectingResult(
[
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: 'Ok'
}
],
'Ok'
)
})
test('retry fails after exhausting retries', async () => {
await testRetryExpectingError([
{
statusCode: 503,
result: null
},
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: 'Ok'
}
])
})
test('retry fails after non-retryable status code', async () => {
await testRetryExpectingError([
{
statusCode: 500,
result: null
},
{
statusCode: 200,
result: 'Ok'
}
])
})
test('retry works after error', async () => {
await testRetryExpectingResult(
[
{
statusCode: 999,
result: null
},
{
statusCode: 200,
result: 'Ok'
}
],
'Ok'
)
})
test('retry returns after client error', async () => {
await testRetryExpectingResult(
[
{
statusCode: 400,
result: null
},
{
statusCode: 200,
result: 'Ok'
}
],
null
)
})

packages/cache/__tests__/cacheUtils.test.ts vendored Normal file

@@ -0,0 +1,26 @@
import {promises as fs} from 'fs'
import * as path from 'path'
import * as cacheUtils from '../src/internal/cacheUtils'
test('getArchiveFileSizeIsBytes returns file size', () => {
const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')
const size = cacheUtils.getArchiveFileSizeIsBytes(filePath)
expect(size).toBe(11)
})
test('unlinkFile unlinks file', async () => {
const testDirectory = await fs.mkdtemp('unlinkFileTest')
const testFile = path.join(testDirectory, 'test.txt')
await fs.writeFile(testFile, 'hello world')
await expect(fs.stat(testFile)).resolves.not.toThrow()
await cacheUtils.unlinkFile(testFile)
// This should throw as testFile should not exist
await expect(fs.stat(testFile)).rejects.toThrow()
await fs.rmdir(testDirectory)
})

packages/cache/__tests__/create-cache-files.sh vendored Normal file

@@ -0,0 +1,17 @@
#!/bin/sh
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
path="$2"
if [ -z "$path" ]; then
echo "Must supply path argument"
exit 1
fi
mkdir -p "$path"
echo "$prefix $GITHUB_RUN_ID" > "$path/test-file.txt"

packages/cache/__tests__/restore.test.ts vendored Normal file

@@ -0,0 +1,269 @@
import * as core from '@actions/core'
import * as path from 'path'
import {restoreCache} from '../src/cache'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import * as cacheUtils from '../src/internal/cacheUtils'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import {ArtifactCacheEntry} from '../src/internal/contracts'
import * as tar from '../src/internal/tar'
jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/tar')
beforeAll(() => {
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})
// eslint-disable-next-line @typescript-eslint/promise-function-async
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
const actualUtils = jest.requireActual('../src/internal/cacheUtils')
return actualUtils.getCacheFileName(cm)
})
})
test('restore with no path should fail', async () => {
const paths: string[] = []
const key = 'node-test'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('restore with too many keys should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = [...Array(20).keys()].map(x => x.toString())
await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
})
test('restore with large key should fail', async () => {
const paths = ['node_modules']
const key = 'foo'.repeat(512) // Over the 512 character limit
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
)
})
test('restore with invalid key should fail', async () => {
const paths = ['node_modules']
const key = 'comma,comma'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot contain commas.`
)
})
test('restore with no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => {
return Promise.resolve(null)
})
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
})
test('restore with server error should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => {
throw new Error('HTTP Error Occurred')
})
await expect(restoreCache(paths, key)).rejects.toThrowError(
'HTTP Error Occurred'
)
})
test('restore with restore keys and no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKey = 'node-'
jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => {
return Promise.resolve(null)
})
const cacheKey = await restoreCache(paths, key, [restoreKey])
expect(cacheKey).toBe(undefined)
})
test('restore with gzip compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: 'refs/heads/master',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Gzip)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeIsBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(key)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
)
expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with zstd compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const infoMock = jest.spyOn(core, 'info')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: 'refs/heads/master',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Zstd)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 62915000
const getArchiveFileSizeIsBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(key)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
)
expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with cache found for restore key', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKey = 'node-'
const infoMock = jest.spyOn(core, 'info')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: restoreKey,
scope: 'refs/heads/master',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Zstd)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeIsBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key, [restoreKey])
expect(cacheKey).toBe(restoreKey)
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, {
compressionMethod: compression
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
)
expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})

packages/cache/__tests__/save.test.ts vendored Normal file

@@ -0,0 +1,189 @@
import * as core from '@actions/core'
import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import * as cacheUtils from '../src/internal/cacheUtils'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as tar from '../src/internal/tar'
jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/tar')
beforeAll(() => {
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})
// eslint-disable-next-line @typescript-eslint/promise-function-async
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
const actualUtils = jest.requireActual('../src/internal/cacheUtils')
return actualUtils.getCacheFileName(cm)
})
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => {
return filePaths.map(x => path.resolve(x))
})
jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => {
return Promise.resolve('/foo/bar')
})
})
test('save with missing input should fail', async () => {
const paths: string[] = []
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('save with large cache outputs should fail', async () => {
const filePath = 'node_modules'
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(filePath)]
const createTarMock = jest.spyOn(tar, 'createTar')
const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
jest
.spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
.mockReturnValueOnce(cacheSize)
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.'
)
const archiveFolder = '/foo/bar'
expect(createTarMock).toHaveBeenCalledTimes(1)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('save with reserve cache failure should fail', async () => {
const paths = ['node_modules']
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
.mockImplementation(async () => {
return -1
})
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
)
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, {
compressionMethod: compression
})
expect(createTarMock).toHaveBeenCalledTimes(0)
expect(saveCacheMock).toHaveBeenCalledTimes(0)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('save with server error should fail', async () => {
const filePath = 'node_modules'
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(filePath)]
const cacheId = 4
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
.mockImplementation(async () => {
return cacheId
})
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest
.spyOn(cacheHttpClient, 'saveCache')
.mockImplementationOnce(() => {
throw new Error('HTTP Error Occurred')
})
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
'HTTP Error Occurred'
)
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
compressionMethod: compression
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(createTarMock).toHaveBeenCalledTimes(1)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('save with valid inputs uploads a cache', async () => {
const filePath = 'node_modules'
const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(filePath)]
const cacheId = 4
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
.mockImplementation(async () => {
return cacheId
})
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
await saveCache([filePath], primaryKey)
expect(reserveCacheMock).toHaveBeenCalledTimes(1)
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
compressionMethod: compression
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(createTarMock).toHaveBeenCalledTimes(1)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})

packages/cache/__tests__/tar.test.ts vendored Normal file

@@ -0,0 +1,191 @@
import * as exec from '@actions/exec'
import * as io from '@actions/io'
import * as path from 'path'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as tar from '../src/internal/tar'
import * as utils from '../src/internal/cacheUtils'
// eslint-disable-next-line @typescript-eslint/no-require-imports
import fs = require('fs')
jest.mock('@actions/exec')
jest.mock('@actions/io')
const IS_WINDOWS = process.platform === 'win32'
function getTempDir(): string {
return path.join(__dirname, '_temp', 'tar')
}
beforeAll(async () => {
jest.spyOn(io, 'which').mockImplementation(async tool => {
return tool
})
process.env['GITHUB_WORKSPACE'] = process.cwd()
await jest.requireActual('@actions/io').rmRF(getTempDir())
})
afterAll(async () => {
delete process.env['GITHUB_WORKSPACE']
await jest.requireActual('@actions/io').rmRF(getTempDir())
})
test('zstd extract tar', async () => {
const mkdirMock = jest.spyOn(io, 'mkdirP')
const execMock = jest.spyOn(exec, 'exec')
const archivePath = IS_WINDOWS
? `${process.env['windir']}\\fakepath\\cache.tar`
: 'cache.tar'
const workspace = process.env['GITHUB_WORKSPACE']
await tar.extractTar(archivePath, CompressionMethod.Zstd)
expect(mkdirMock).toHaveBeenCalledWith(workspace)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: 'tar'
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
'--use-compress-program',
'zstd -d --long=30',
'-xf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P',
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace
],
{cwd: undefined}
)
})
test('gzip extract tar', async () => {
const mkdirMock = jest.spyOn(io, 'mkdirP')
const execMock = jest.spyOn(exec, 'exec')
const archivePath = IS_WINDOWS
? `${process.env['windir']}\\fakepath\\cache.tar`
: 'cache.tar'
const workspace = process.env['GITHUB_WORKSPACE']
await tar.extractTar(archivePath, CompressionMethod.Gzip)
expect(mkdirMock).toHaveBeenCalledWith(workspace)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: 'tar'
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
'-z',
'-xf',
IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath,
'-P',
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace
],
{cwd: undefined}
)
})
test('gzip extract GNU tar on windows', async () => {
if (IS_WINDOWS) {
jest.spyOn(fs, 'existsSync').mockReturnValueOnce(false)
const isGnuMock = jest
.spyOn(utils, 'useGnuTar')
.mockReturnValue(Promise.resolve(true))
const execMock = jest.spyOn(exec, 'exec')
const archivePath = `${process.env['windir']}\\fakepath\\cache.tar`
const workspace = process.env['GITHUB_WORKSPACE']
await tar.extractTar(archivePath, CompressionMethod.Gzip)
expect(isGnuMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"tar"`,
[
'-z',
'-xf',
archivePath.replace(/\\/g, '/'),
'-P',
'-C',
workspace?.replace(/\\/g, '/'),
'--force-local'
],
{cwd: undefined}
)
}
})
test('zstd create tar', async () => {
const execMock = jest.spyOn(exec, 'exec')
const archiveFolder = getTempDir()
const workspace = process.env['GITHUB_WORKSPACE']
const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`]
await fs.promises.mkdir(archiveFolder, {recursive: true})
await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Zstd)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: 'tar'
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
'--use-compress-program',
'zstd -T0 --long=30',
'-cf',
IS_WINDOWS ? CacheFilename.Zstd.replace(/\\/g, '/') : CacheFilename.Zstd,
'-P',
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace,
'--files-from',
'manifest.txt'
],
{
cwd: archiveFolder
}
)
})
test('gzip create tar', async () => {
const execMock = jest.spyOn(exec, 'exec')
const archiveFolder = getTempDir()
const workspace = process.env['GITHUB_WORKSPACE']
const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`]
await fs.promises.mkdir(archiveFolder, {recursive: true})
await tar.createTar(archiveFolder, sourceDirectories, CompressionMethod.Gzip)
const tarPath = IS_WINDOWS
? `${process.env['windir']}\\System32\\tar.exe`
: 'tar'
expect(execMock).toHaveBeenCalledTimes(1)
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
'-z',
'-cf',
IS_WINDOWS ? CacheFilename.Gzip.replace(/\\/g, '/') : CacheFilename.Gzip,
'-P',
'-C',
IS_WINDOWS ? workspace?.replace(/\\/g, '/') : workspace,
'--files-from',
'manifest.txt'
],
{
cwd: archiveFolder
}
)
})

packages/cache/__tests__/verify-cache-files.sh vendored Normal file

@@ -0,0 +1,36 @@
#!/bin/sh
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
path="$2"
if [ -z "$path" ]; then
echo "Must specify path argument"
exit 1
fi
# Sanity check GITHUB_RUN_ID defined
if [ -z "$GITHUB_RUN_ID" ]; then
echo "GITHUB_RUN_ID not defined"
exit 1
fi
# Verify file exists
file="$path/test-file.txt"
echo "Checking for $file"
if [ ! -e "$file" ]; then
echo "File does not exist"
exit 1
fi
# Verify file content
content="$(cat "$file")"
printf 'File content:\n%s\n' "$content"
if [ -z "$(printf '%s' "$content" | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
echo "Unexpected file content"
exit 1
fi

packages/cache/package-lock.json generated vendored Normal file

@@ -0,0 +1,92 @@
{
"name": "@actions/cache",
"version": "0.1.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@actions/core": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.4.tgz",
"integrity": "sha512-YJCEq8BE3CdN8+7HPZ/4DxJjk/OkZV2FFIf+DlZTC/4iBlzYCD5yjRR6eiOS5llO11zbRltIRuKAjMKaWTE6cg=="
},
"@actions/exec": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.4.tgz",
"integrity": "sha512-4DPChWow9yc9W3WqEbUj8Nr86xkpyE29ZzWjXucHItclLbEW6jr80Zx4nqv18QL6KK65+cifiQZXvnqgTV6oHw==",
"requires": {
"@actions/io": "^1.0.1"
}
},
"@actions/glob": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.0.tgz",
"integrity": "sha512-lx8SzyQ2FE9+UUvjqY1f28QbTJv+w8qP7kHHbfQRhphrlcx0Mdmm1tZdGJzfxv1jxREa/sLW4Oy8CbGQKCJySA==",
"requires": {
"@actions/core": "^1.2.0",
"minimatch": "^3.0.4"
}
},
"@actions/http-client": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz",
"integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==",
"requires": {
"tunnel": "0.0.6"
}
},
"@actions/io": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz",
"integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg=="
},
"@types/uuid": {
"version": "3.4.9",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.9.tgz",
"integrity": "sha512-XDwyIlt/47l2kWLTzw/mtrpLdB+GPSskR2n/PIcPn+VYhVO77rGhRncIR5GPU0KRzXuqkDO+J5qqrG0Y8P6jzQ==",
"dev": true
},
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "^1.1.7"
}
},
"tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"typescript": {
"version": "3.8.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz",
"integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==",
"dev": true
},
"uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
}
}
}

packages/cache/package.json vendored Normal file

@@ -0,0 +1,50 @@
{
"name": "@actions/cache",
"version": "0.1.0",
"preview": true,
"description": "Actions cache lib",
"keywords": [
"github",
"actions",
"cache"
],
"homepage": "https://github.com/actions/toolkit/tree/master/packages/cache",
"license": "MIT",
"main": "lib/cache.js",
"types": "lib/cache.d.ts",
"directories": {
"lib": "lib",
"test": "__tests__"
},
"files": [
"lib"
],
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/toolkit.git",
"directory": "packages/cache"
},
"scripts": {
"audit-moderate": "npm install && npm audit --audit-level=moderate",
"test": "echo \"Error: run tests from root\" && exit 1",
"tsc": "tsc"
},
"bugs": {
"url": "https://github.com/actions/toolkit/issues"
},
"dependencies": {
"@actions/core": "^1.2.4",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^1.0.8",
"@actions/io": "^1.0.1",
"uuid": "^3.3.3"
},
"devDependencies": {
"typescript": "^3.8.3",
"@types/uuid": "^3.4.5"
}
}

packages/cache/src/cache.ts vendored Normal file

@@ -0,0 +1,175 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar} from './internal/tar'
import {UploadOptions} from './options'
export class ValidationError extends Error {
constructor(message: string) {
super(message)
this.name = 'ValidationError'
Object.setPrototypeOf(this, ValidationError.prototype)
}
}
export class ReserveCacheError extends Error {
constructor(message: string) {
super(message)
this.name = 'ReserveCacheError'
Object.setPrototypeOf(this, ReserveCacheError.prototype)
}
}
function checkPaths(paths: string[]): void {
if (!paths || paths.length === 0) {
throw new ValidationError(
`Path Validation Error: At least one directory or file path is required`
)
}
}
function checkKey(key: string): void {
if (key.length > 512) {
throw new ValidationError(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
)
}
const regex = /^[^,]*$/
if (!regex.test(key)) {
throw new ValidationError(
`Key Validation Error: ${key} cannot contain commas.`
)
}
}
/**
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
export async function restoreCache(
paths: string[],
primaryKey: string,
restoreKeys?: string[]
): Promise<string | undefined> {
checkPaths(paths)
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]
core.debug('Resolved Keys:')
core.debug(JSON.stringify(keys))
if (keys.length > 10) {
throw new ValidationError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
}
for (const key of keys) {
checkKey(key)
}
const compressionMethod = await utils.getCompressionMethod()
// paths are needed to compute the version
const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
})
if (!cacheEntry?.archiveLocation) {
// Cache not found
return undefined
}
const archivePath = path.join(
await utils.createTempDirectory(),
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive Path: ${archivePath}`)
try {
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)
await extractTar(archivePath, compressionMethod)
} finally {
// Try to delete the archive to save space
try {
await utils.unlinkFile(archivePath)
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
return cacheEntry.cacheKey
}
/**
* Saves a list of files with the specified key
*
* @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
export async function saveCache(
paths: string[],
key: string,
options?: UploadOptions
): Promise<number> {
checkPaths(paths)
checkKey(key)
const compressionMethod = await utils.getCompressionMethod()
core.debug('Reserving Cache')
const cacheId = await cacheHttpClient.reserveCache(key, paths, {
compressionMethod
})
if (cacheId === -1) {
throw new ReserveCacheError(
`Unable to reserve cache with key ${key}, another job may be creating this cache.`
)
}
core.debug(`Cache ID: ${cacheId}`)
const cachePaths = await utils.resolvePaths(paths)
core.debug('Cache Paths:')
core.debug(`${JSON.stringify(cachePaths)}`)
const archiveFolder = await utils.createTempDirectory()
const archivePath = path.join(
archiveFolder,
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive Path: ${archivePath}`)
await createTar(archiveFolder, cachePaths, compressionMethod)
const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
core.debug(`File Size: ${archiveFileSize}`)
if (archiveFileSize > fileSizeLimit) {
throw new Error(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
)
}
core.debug(`Saving Cache (ID: ${cacheId})`)
await cacheHttpClient.saveCache(cacheId, archivePath, options)
return cacheId
}

packages/cache/src/internal/cacheHttpClient.ts vendored Normal file

@@ -0,0 +1,411 @@
import * as core from '@actions/core'
import {HttpClient, HttpCodes} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/auth'
import {
IHttpClientResponse,
IRequestOptions,
ITypedResponse
} from '@actions/http-client/interfaces'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as stream from 'stream'
import * as util from 'util'
import * as utils from './cacheUtils'
import {CompressionMethod, SocketTimeout} from './constants'
import {
ArtifactCacheEntry,
InternalCacheOptions,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
} from './contracts'
import {UploadOptions} from '../options'
const versionSalt = '1.0'
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false
}
return statusCode >= 200 && statusCode < 300
}
function isServerErrorStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return true
}
return statusCode >= 500
}
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
]
return retryableStatusCodes.includes(statusCode)
}
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = (
process.env['ACTIONS_CACHE_URL'] ||
process.env['ACTIONS_RUNTIME_URL'] ||
''
).replace('pipelines', 'artifactcache')
if (!baseUrl) {
throw new Error('Cache Service Url not found, unable to restore cache.')
}
const url = `${baseUrl}_apis/artifactcache/${resource}`
core.debug(`Resource Url: ${url}`)
return url
}
function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`
}
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
headers: {
Accept: createAcceptHeader('application/json', '6.0-preview.1')
}
}
return requestOptions
}
function createHttpClient(): HttpClient {
const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
const bearerCredentialHandler = new BearerCredentialHandler(token)
return new HttpClient(
'actions/cache',
[bearerCredentialHandler],
getRequestOptions()
)
}
export function getCacheVersion(
paths: string[],
compressionMethod?: CompressionMethod
): string {
const components = paths.concat(
compressionMethod === CompressionMethod.Zstd ? [compressionMethod] : []
)
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt)
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex')
}
export async function retry<T>(
name: string,
method: () => Promise<T>,
getStatusCode: (arg0: T) => number | undefined,
maxAttempts = 2
): Promise<T> {
let response: T | undefined = undefined
let statusCode: number | undefined = undefined
let isRetryable = false
let errorMessage = ''
let attempt = 1
while (attempt <= maxAttempts) {
try {
response = await method()
statusCode = getStatusCode(response)
if (!isServerErrorStatusCode(statusCode)) {
return response
}
isRetryable = isRetryableStatusCode(statusCode)
errorMessage = `Cache service responded with ${statusCode}`
} catch (error) {
isRetryable = true
errorMessage = error.message
}
core.debug(
`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
)
if (!isRetryable) {
core.debug(`${name} - Error is not retryable`)
break
}
attempt++
}
throw Error(`${name} failed: ${errorMessage}`)
}
export async function retryTypedResponse<T>(
name: string,
method: () => Promise<ITypedResponse<T>>,
maxAttempts = 2
): Promise<ITypedResponse<T>> {
return await retry(
name,
method,
(response: ITypedResponse<T>) => response.statusCode,
maxAttempts
)
}
export async function retryHttpClientResponse<T>(
name: string,
method: () => Promise<IHttpClientResponse>,
maxAttempts = 2
): Promise<IHttpClientResponse> {
return await retry(
name,
method,
(response: IHttpClientResponse) => response.message.statusCode,
maxAttempts
)
}
export async function getCacheEntry(
keys: string[],
paths: string[],
options?: InternalCacheOptions
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient()
const version = getCacheVersion(paths, options?.compressionMethod)
const resource = `cache?keys=${encodeURIComponent(
keys.join(',')
)}&version=${version}`
const response = await retryTypedResponse('getCacheEntry', async () =>
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
)
if (response.statusCode === 204) {
return null
}
if (!isSuccessStatusCode(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`)
}
const cacheResult = response.result
const cacheDownloadUrl = cacheResult?.archiveLocation
if (!cacheDownloadUrl) {
throw new Error('Cache not found.')
}
core.setSecret(cacheDownloadUrl)
core.debug(`Cache Result:`)
core.debug(JSON.stringify(cacheResult))
return cacheResult
}
async function pipeResponseToStream(
response: IHttpClientResponse,
output: NodeJS.WritableStream
): Promise<void> {
const pipeline = util.promisify(stream.pipeline)
await pipeline(response.message, output)
}
export async function downloadCache(
archiveLocation: string,
archivePath: string
): Promise<void> {
const writeStream = fs.createWriteStream(archivePath)
const httpClient = new HttpClient('actions/cache')
const downloadResponse = await retryHttpClientResponse(
'downloadCache',
async () => httpClient.get(archiveLocation)
)
// Abort download if no traffic received over the socket.
downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
downloadResponse.message.destroy()
core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`)
})
await pipeResponseToStream(downloadResponse, writeStream)
// Validate download size.
const contentLengthHeader = downloadResponse.message.headers['content-length']
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader)
const actualLength = utils.getArchiveFileSizeIsBytes(archivePath)
if (actualLength !== expectedLength) {
throw new Error(
`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
)
}
} else {
core.debug('Unable to validate download, no Content-Length header')
}
}
// Reserve Cache
export async function reserveCache(
key: string,
paths: string[],
options?: InternalCacheOptions
): Promise<number> {
const httpClient = createHttpClient()
const version = getCacheVersion(paths, options?.compressionMethod)
const reserveCacheRequest: ReserveCacheRequest = {
key,
version
}
const response = await retryTypedResponse('reserveCache', async () =>
httpClient.postJson<ReserveCacheResponse>(
getCacheApiUrl('caches'),
reserveCacheRequest
)
)
return response?.result?.cacheId ?? -1
}
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`
}
async function uploadChunk(
httpClient: HttpClient,
resourceUrl: string,
openStream: () => NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
core.debug(
`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
)
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
}
await retryHttpClientResponse(
`uploadChunk (start: ${start}, end: ${end})`,
async () =>
httpClient.sendStream(
'PATCH',
resourceUrl,
openStream(),
additionalHeaders
)
)
}
async function uploadFile(
httpClient: HttpClient,
cacheId: number,
archivePath: string,
options?: UploadOptions
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
const fd = fs.openSync(archivePath, 'r')
const concurrency = options?.uploadConcurrency ?? 4 // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = options?.uploadChunkSize ?? 32 * 1024 * 1024 // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`)
const parallelUploads = [...new Array(concurrency).keys()]
core.debug('Awaiting all uploads')
let offset = 0
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE)
const start = offset
const end = offset + chunkSize - 1
offset += MAX_CHUNK_SIZE
await uploadChunk(
httpClient,
resourceUrl,
() =>
fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on('error', error => {
throw new Error(
`Cache upload failed because file read failed with ${error.message}`
)
}),
start,
end
)
}
})
)
} finally {
fs.closeSync(fd)
}
return
}
async function commitCache(
httpClient: HttpClient,
cacheId: number,
filesize: number
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = {size: filesize}
return await retryTypedResponse('commitCache', async () =>
httpClient.postJson<null>(
getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest
)
)
}
export async function saveCache(
cacheId: number,
archivePath: string,
options?: UploadOptions
): Promise<void> {
const httpClient = createHttpClient()
core.debug('Upload cache')
await uploadFile(httpClient, cacheId, archivePath, options)
// Commit Cache
core.debug('Committing cache')
const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
)
}
core.info('Cache saved successfully')
}

packages/cache/src/internal/cacheUtils.ts vendored Normal file

@@ -0,0 +1,100 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as glob from '@actions/glob'
import * as io from '@actions/io'
import * as fs from 'fs'
import * as path from 'path'
import * as util from 'util'
import {v4 as uuidV4} from 'uuid'
import {CacheFilename, CompressionMethod} from './constants'
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
export async function createTempDirectory(): Promise<string> {
const IS_WINDOWS = process.platform === 'win32'
let tempDirectory: string = process.env['RUNNER_TEMP'] || ''
if (!tempDirectory) {
let baseLocation: string
if (IS_WINDOWS) {
// On Windows use the USERPROFILE env variable
baseLocation = process.env['USERPROFILE'] || 'C:\\'
} else {
if (process.platform === 'darwin') {
baseLocation = '/Users'
} else {
baseLocation = '/home'
}
}
tempDirectory = path.join(baseLocation, 'actions', 'temp')
}
const dest = path.join(tempDirectory, uuidV4())
await io.mkdirP(dest)
return dest
}
export function getArchiveFileSizeIsBytes(filePath: string): number {
return fs.statSync(filePath).size
}
export async function resolvePaths(patterns: string[]): Promise<string[]> {
const paths: string[] = []
const workspace = process.env['GITHUB_WORKSPACE'] ?? process.cwd()
const globber = await glob.create(patterns.join('\n'), {
implicitDescendants: false
})
for await (const file of globber.globGenerator()) {
const relativeFile = path.relative(workspace, file)
core.debug(`Matched: ${relativeFile}`)
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`)
}
return paths
}
export async function unlinkFile(filePath: fs.PathLike): Promise<void> {
return util.promisify(fs.unlink)(filePath)
}
async function getVersion(app: string): Promise<string> {
core.debug(`Checking ${app} --version`)
let versionOutput = ''
try {
await exec.exec(`${app} --version`, [], {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data: Buffer): string => (versionOutput += data.toString()),
stderr: (data: Buffer): string => (versionOutput += data.toString())
}
})
} catch (err) {
core.debug(err.message)
}
versionOutput = versionOutput.trim()
core.debug(versionOutput)
return versionOutput
}
// Use zstandard if possible to maximize cache performance
export async function getCompressionMethod(): Promise<CompressionMethod> {
const versionOutput = await getVersion('zstd')
return versionOutput.toLowerCase().includes('zstd command line interface')
? CompressionMethod.Zstd
: CompressionMethod.Gzip
}
export function getCacheFileName(compressionMethod: CompressionMethod): string {
return compressionMethod === CompressionMethod.Zstd
? CacheFilename.Zstd
: CacheFilename.Gzip
}
export async function useGnuTar(): Promise<boolean> {
const versionOutput = await getVersion('tar')
return versionOutput.toLowerCase().includes('gnu tar')
}

packages/cache/src/internal/constants.ts vendored Normal file

@@ -0,0 +1,14 @@
export enum CacheFilename {
Gzip = 'cache.tgz',
Zstd = 'cache.tzst'
}
export enum CompressionMethod {
Gzip = 'gzip',
Zstd = 'zstd'
}
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000

packages/cache/src/internal/contracts.d.ts vendored Normal file

@@ -0,0 +1,25 @@
import {CompressionMethod} from './constants'
export interface ArtifactCacheEntry {
cacheKey?: string
scope?: string
creationTime?: string
archiveLocation?: string
}
export interface CommitCacheRequest {
size: number
}
export interface ReserveCacheRequest {
key: string
version?: string
}
export interface ReserveCacheResponse {
cacheId: number
}
export interface InternalCacheOptions {
compressionMethod?: CompressionMethod
}

packages/cache/src/internal/tar.ts vendored Normal file

@@ -0,0 +1,86 @@
import {exec} from '@actions/exec'
import * as io from '@actions/io'
import {existsSync, writeFileSync} from 'fs'
import * as path from 'path'
import * as utils from './cacheUtils'
import {CompressionMethod} from './constants'
async function getTarPath(args: string[]): Promise<string> {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === 'win32'
if (IS_WINDOWS) {
const systemTar = `${process.env['windir']}\\System32\\tar.exe`
if (existsSync(systemTar)) {
return systemTar
} else if (await utils.useGnuTar()) {
args.push('--force-local')
}
}
return await io.which('tar', true)
}
async function execTar(args: string[], cwd?: string): Promise<void> {
try {
await exec(`"${await getTarPath(args)}"`, args, {cwd})
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`)
}
}
function getWorkingDirectory(): string {
return process.env['GITHUB_WORKSPACE'] ?? process.cwd()
}
export async function extractTar(
archivePath: string,
compressionMethod: CompressionMethod
): Promise<void> {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory()
await io.mkdirP(workingDirectory)
// -d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const args = [
...(compressionMethod === CompressionMethod.Zstd
? ['--use-compress-program', 'zstd -d --long=30']
: ['-z']),
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
await execTar(args)
}
export async function createTar(
archiveFolder: string,
sourceDirectories: string[],
compressionMethod: CompressionMethod
): Promise<void> {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = 'manifest.txt'
const cacheFileName = utils.getCacheFileName(compressionMethod)
writeFileSync(
path.join(archiveFolder, manifestFilename),
sourceDirectories.join('\n')
)
// -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const workingDirectory = getWorkingDirectory()
const args = [
...(compressionMethod === CompressionMethod.Zstd
? ['--use-compress-program', 'zstd -T0 --long=30']
: ['-z']),
'-cf',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--files-from',
manifestFilename
]
await execTar(args, archiveFolder)
}

packages/cache/src/options.ts vendored Normal file

@@ -0,0 +1,17 @@
/**
* Options to control cache upload
*/
export interface UploadOptions {
/**
* Number of parallel cache uploads
*
* @default 4
*/
uploadConcurrency?: number
/**
* Maximum chunk size in bytes for cache upload
*
* @default 32MB
*/
uploadChunkSize?: number
}
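
Both fields are optional; when omitted, `uploadFile` in `cacheHttpClient.ts` falls back to 4 parallel requests and 32 MB chunks. A minimal sketch of overriding them through `saveCache` (the values here are illustrative, not recommendations):

```js
const cache = require('@actions/cache');

// Trade memory for throughput: 8 parallel PATCH requests with 16 MB chunks.
const cacheId = await cache.saveCache(['node_modules'], 'npm-foobar-d5ea0750', {
    uploadConcurrency: 8,
    uploadChunkSize: 16 * 1024 * 1024
})
```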

packages/cache/tsconfig.json vendored Normal file

@@ -0,0 +1,11 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": "./",
"outDir": "./lib",
"rootDir": "./src"
},
"include": [
"./src"
]
}