mirror of https://github.com/actions/toolkit
React to feedback
parent b3c8e19a7a
commit d2b2399bd2
@@ -64,8 +64,8 @@ jobs:
 - name: Verify downloadArtifact()
 shell: bash
 run: |
-scripts/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-scripts/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+packages/artifact/__tests__/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+packages/artifact/__tests__/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"

 - name: Download artifacts using downloadAllArtifacts()
 run: |
@@ -75,5 +75,5 @@ jobs:
 - name: Verify downloadAllArtifacts()
 shell: bash
 run: |
-scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
@@ -47,11 +47,11 @@ jobs:

 - name: Generate files in working directory
 shell: bash
-run: scripts/create-cache-files.sh ${{ runner.os }} test-cache
+run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache

 - name: Generate files outside working directory
 shell: bash
-run: scripts/create-cache-files.sh ${{ runner.os }} ~/test-cache
+run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache

 # We're using node -e to call the functions directly available in the @actions/cache package
 - name: Save cache using saveCache()
@@ -65,5 +65,5 @@ jobs:
 - name: Verify cache
 shell: bash
 run: |
-scripts/verify-cache-files.sh ${{ runner.os }} test-cache
-scripts/verify-cache-files.sh ${{ runner.os }} ~/test-cache
+packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
+packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
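As the comment in the hunk above notes, the workflow steps call @actions/cache directly with `node -e` rather than through a published action. A rough sketch of the kind of inline script such a step might pass to `node -e` — the paths, key, and logging below are illustrative, not taken from the actual workflow file:

```ts
// Sketch only: what a `node -e` step body calling saveCache() directly might look like.
const cache = require('@actions/cache')

async function run(): Promise<void> {
  // Illustrative inputs; the real workflow generates files via create-cache-files.sh.
  const paths = ['test-cache', '~/test-cache']
  const key = `test-${process.env.RUNNER_OS}`
  const cacheId = await cache.saveCache(paths, key)
  console.log(`Saved cache with id ${cacheId}`)
}

run().catch(err => {
  console.error(err)
  process.exit(1)
})
```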
@@ -2,6 +2,10 @@
 > Functions necessary for caching dependencies and build outputs to improve workflow execution time.

+See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows) for how caching works.
+
+Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 5 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 5 GB.
+
 ## Usage

 #### Restore Cache
@@ -24,7 +28,7 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)

 #### Save Cache

-Saves a cache containing the files in `paths` using the `key` provided. Function returns the cache id if the cache was save succesfully.
+Saves a cache containing the files in `paths` using the `key` provided. The files are compressed with the zstandard algorithm if zstd is installed, otherwise with gzip. The function returns the cache id if the cache was saved successfully and throws an error if the cache upload fails.

 ```js
 const cache = require('@actions/cache');
@@ -35,7 +39,3 @@ const paths = [
 const key = 'npm-foobar-d5ea0750'
 const cacheId = await cache.saveCache(paths, key)
 ```

-## Additional Documentation
-
-See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
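Because the updated description says `saveCache` now throws when the upload fails, callers that previously relied only on the return value may want to wrap the call. A hedged sketch of that caller-side handling, using the same example paths and key as the README snippet (the decision to swallow the error is illustrative, not prescribed by the package):

```ts
import * as cache from '@actions/cache'

async function saveDependencyCache(): Promise<void> {
  const paths = ['node_modules']
  const key = 'npm-foobar-d5ea0750'
  try {
    const cacheId = await cache.saveCache(paths, key)
    console.log(`Cache saved with id ${cacheId}`)
  } catch (error) {
    // saveCache now throws on a failed upload, so the caller decides
    // whether a failed save should fail the whole job or just warn.
    console.warn(`Cache save failed: ${(error as Error).message}`)
  }
}
```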
@@ -1,5 +1,5 @@
 # @actions/cache Releases

-### 1.0.0
+### 0.1.0

 - Initial release
@@ -1,24 +1,11 @@
-import * as io from '@actions/io'
 import {promises as fs} from 'fs'
 import * as path from 'path'
 import * as cacheUtils from '../src/internal/cacheUtils'

-jest.mock('@actions/core')
-jest.mock('os')
-
-function getTempDir(): string {
-  return path.join(__dirname, '_temp', 'cacheUtils')
-}
-
-afterAll(async () => {
-  delete process.env['GITHUB_WORKSPACE']
-  await io.rmRF(getTempDir())
-})
-
-test('getArchiveFileSize returns file size', () => {
+test('getArchiveFileSizeIsBytes returns file size', () => {
   const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')

-  const size = cacheUtils.getArchiveFileSize(filePath)
+  const size = cacheUtils.getArchiveFileSizeIsBytes(filePath)

   expect(size).toBe(11)
 })
@@ -28,6 +15,8 @@ test('unlinkFile unlinks file', async () => {
   const testFile = path.join(testDirectory, 'test.txt')
   await fs.writeFile(testFile, 'hello world')

+  await expect(fs.stat(testFile)).resolves.not.toThrow()
+
   await cacheUtils.unlinkFile(testFile)

   // This should throw as testFile should not exist
@@ -12,6 +12,12 @@ jest.mock('../src/internal/cacheUtils')
 jest.mock('../src/internal/tar')

 beforeAll(() => {
+  jest.spyOn(console, 'log').mockImplementation(() => {})
+  jest.spyOn(core, 'debug').mockImplementation(() => {})
+  jest.spyOn(core, 'info').mockImplementation(() => {})
+  jest.spyOn(core, 'warning').mockImplementation(() => {})
+  jest.spyOn(core, 'error').mockImplementation(() => {})
+
   // eslint-disable-next-line @typescript-eslint/promise-function-async
   jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
     const actualUtils = jest.requireActual('../src/internal/cacheUtils')
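The spies added in this `beforeAll` replace the earlier blanket `jest.mock('@actions/core')`: output is silenced while the module keeps its real shape, so individual tests can still attach their own spies and assertions. A standalone sketch of that pattern, separate from the test files in this diff:

```ts
import * as core from '@actions/core'

beforeAll(() => {
  // Silence log output without replacing the whole module.
  jest.spyOn(core, 'info').mockImplementation(() => {})
  jest.spyOn(core, 'warning').mockImplementation(() => {})
})

test('emits a warning on fallback', () => {
  // The same function can still be observed by a test-local spy.
  const warningMock = jest.spyOn(core, 'warning')
  core.warning('falling back to gzip')
  expect(warningMock).toHaveBeenCalledWith('falling back to gzip')
})
```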
@@ -56,7 +62,6 @@ test('restore with no cache found', async () => {
   const paths = ['node_modules']
   const key = 'node-test'

-  const infoMock = jest.spyOn(core, 'info')
   jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => {
     return Promise.resolve(null)
   })
@@ -64,9 +69,6 @@ test('restore with no cache found', async () => {
   const cacheKey = await restoreCache(paths, key)

   expect(cacheKey).toBe(undefined)
-  expect(infoMock).toHaveBeenCalledWith(
-    `Cache not found for input keys: ${key}`
-  )
 })

 test('restore with server error should fail', async () => {
@@ -87,8 +89,6 @@ test('restore with restore keys and no cache found', async () => {
   const key = 'node-test'
   const restoreKey = 'node-'

-  const infoMock = jest.spyOn(core, 'info')
-
   jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => {
     return Promise.resolve(null)
   })
@@ -96,17 +96,12 @@ test('restore with restore keys and no cache found', async () => {
   const cacheKey = await restoreCache(paths, key, [restoreKey])

   expect(cacheKey).toBe(undefined)
-  expect(infoMock).toHaveBeenCalledWith(
-    `Cache not found for input keys: ${key}, ${restoreKey}`
-  )
 })

 test('restore with gzip compressed cache found', async () => {
   const paths = ['node_modules']
   const key = 'node-test'

-  const infoMock = jest.spyOn(core, 'info')
-
   const cacheEntry: ArtifactCacheEntry = {
     cacheKey: key,
     scope: 'refs/heads/master',
@@ -128,8 +123,8 @@ test('restore with gzip compressed cache found', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

   const fileSize = 142
-  const getArchiveFileSizeMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSize')
+  const getArchiveFileSizeIsBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
     .mockReturnValue(fileSize)

   const extractTarMock = jest.spyOn(tar, 'extractTar')
@@ -151,7 +146,7 @@ test('restore with gzip compressed cache found', async () => {
     cacheEntry.archiveLocation,
     archivePath
   )
-  expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)

   expect(extractTarMock).toHaveBeenCalledTimes(1)
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
@@ -159,11 +154,10 @@ test('restore with gzip compressed cache found', async () => {
   expect(unlinkFileMock).toHaveBeenCalledTimes(1)
   expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)

-  expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })

-test('restore with a pull request event and zstd compressed cache found', async () => {
+test('restore with zstd compressed cache found', async () => {
   const paths = ['node_modules']
   const key = 'node-test'

@@ -189,8 +183,8 @@ test('restore with a pull request event and zstd compressed cache found', async
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

   const fileSize = 62915000
-  const getArchiveFileSizeMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSize')
+  const getArchiveFileSizeIsBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
     .mockReturnValue(fileSize)

   const extractTarMock = jest.spyOn(tar, 'extractTar')
@@ -210,13 +204,11 @@ test('restore with a pull request event and zstd compressed cache found', async
     cacheEntry.archiveLocation,
     archivePath
   )
-  expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)

   expect(extractTarMock).toHaveBeenCalledTimes(1)
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)

-  expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
@@ -247,8 +239,8 @@ test('restore with cache found for restore key', async () => {
   const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

   const fileSize = 142
-  const getArchiveFileSizeMock = jest
-    .spyOn(cacheUtils, 'getArchiveFileSize')
+  const getArchiveFileSizeIsBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
     .mockReturnValue(fileSize)

   const extractTarMock = jest.spyOn(tar, 'extractTar')
@@ -268,14 +260,10 @@ test('restore with cache found for restore key', async () => {
     cacheEntry.archiveLocation,
     archivePath
   )
-  expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath)
+  expect(getArchiveFileSizeIsBytesMock).toHaveBeenCalledWith(archivePath)
   expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)

   expect(extractTarMock).toHaveBeenCalledTimes(1)
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)

-  expect(infoMock).toHaveBeenCalledWith(
-    `Cache restored from key: ${restoreKey}`
-  )
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
@@ -1,3 +1,4 @@
+import * as core from '@actions/core'
 import * as path from 'path'
 import {saveCache} from '../src/cache'
 import * as cacheHttpClient from '../src/internal/cacheHttpClient'
@@ -5,12 +6,17 @@ import * as cacheUtils from '../src/internal/cacheUtils'
 import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import * as tar from '../src/internal/tar'

-jest.mock('@actions/core')
 jest.mock('../src/internal/cacheHttpClient')
 jest.mock('../src/internal/cacheUtils')
 jest.mock('../src/internal/tar')

 beforeAll(() => {
+  jest.spyOn(console, 'log').mockImplementation(() => {})
+  jest.spyOn(core, 'debug').mockImplementation(() => {})
+  jest.spyOn(core, 'info').mockImplementation(() => {})
+  jest.spyOn(core, 'warning').mockImplementation(() => {})
+  jest.spyOn(core, 'error').mockImplementation(() => {})
+
   // eslint-disable-next-line @typescript-eslint/promise-function-async
   jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
     const actualUtils = jest.requireActual('../src/internal/cacheUtils')
@@ -42,7 +48,9 @@ test('save with large cache outputs should fail', async () => {
   const createTarMock = jest.spyOn(tar, 'createTar')

   const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
-  jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValueOnce(cacheSize)
+  jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeIsBytes')
+    .mockReturnValueOnce(cacheSize)
   const compression = CompressionMethod.Gzip
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
@@ -1,6 +1,6 @@
 {
   "name": "@actions/cache",
-  "version": "1.0.0",
+  "version": "0.1.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -1,6 +1,6 @@
 {
   "name": "@actions/cache",
-  "version": "1.0.0",
+  "version": "0.1.0",
   "preview": true,
   "description": "Actions cache lib",
   "keywords": [
@@ -9,6 +9,7 @@ export class ValidationError extends Error {
   constructor(message: string) {
     super(message)
     this.name = 'ValidationError'
+    Object.setPrototypeOf(this, ValidationError.prototype)
   }
 }

@@ -16,6 +17,7 @@ export class ReserveCacheError extends Error {
   constructor(message: string) {
     super(message)
     this.name = 'ReserveCacheError'
+    Object.setPrototypeOf(this, ReserveCacheError.prototype)
   }
 }

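The added `Object.setPrototypeOf` calls address the well-known TypeScript behavior where, when compiling to ES5, subclasses of built-ins such as `Error` lose their prototype chain, so `instanceof` checks against the subclass fail. A minimal sketch of the pattern outside this file (the class name is illustrative):

```ts
class CacheClientError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'CacheClientError'
    // Without this line, `err instanceof CacheClientError` can be false
    // when the output is down-leveled to ES5.
    Object.setPrototypeOf(this, CacheClientError.prototype)
  }
}

const err: Error = new CacheClientError('boom')
console.log(err instanceof CacheClientError) // true
```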
@@ -47,7 +49,7 @@ function checkKey(key: string): void {
  * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
- * @returns string returns the key for the cache hit, otherwise return undefined
+ * @returns string returns the key for the cache hit, otherwise returns undefined
  */
 export async function restoreCache(
   paths: string[],
@@ -78,7 +80,7 @@ export async function restoreCache(
     compressionMethod
   })
   if (!cacheEntry?.archiveLocation) {
-    core.info(`Cache not found for input keys: ${keys.join(', ')}`)
+    // Cache not found
     return undefined
   }

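With the `core.info` call replaced by a comment, the library no longer logs a cache miss itself; per the doc comment above, a miss simply resolves to `undefined`, so reporting is now up to the caller. A hedged sketch of caller-side handling (the keys and message wording are illustrative):

```ts
import * as core from '@actions/core'
import * as cache from '@actions/cache'

async function restoreNodeModules(): Promise<void> {
  const paths = ['node_modules']
  const key = 'npm-foobar-d5ea0750'
  const restoreKeys = ['npm-foobar-', 'npm-']

  const hitKey = await cache.restoreCache(paths, key, restoreKeys)
  if (!hitKey) {
    // restoreCache resolves to undefined on a miss; logging is the caller's choice now.
    core.info(`Cache not found for input keys: ${[key, ...restoreKeys].join(', ')}`)
    return
  }
  core.info(`Cache restored from key: ${hitKey}`)
}
```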
@@ -92,7 +94,7 @@ export async function restoreCache(
   // Download the cache from the cache entry
   await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)

-  const archiveFileSize = utils.getArchiveFileSize(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
   core.info(
     `Cache Size: ~${Math.round(
       archiveFileSize / (1024 * 1024)
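The `Cache Size` message rounds the byte count to whole megabytes, which is exactly what the test expectation `Cache Size: ~60 MB (62915000 B)` earlier in this diff relies on. A quick check of that arithmetic:

```ts
const archiveFileSize = 62915000 // bytes, as mocked in the zstd restore test
const sizeInMB = Math.round(archiveFileSize / (1024 * 1024)) // 62915000 / 1048576 ≈ 60.0003 → 60
console.log(`Cache Size: ~${sizeInMB} MB (${archiveFileSize} B)`) // Cache Size: ~60 MB (62915000 B)
```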
@@ -109,8 +111,6 @@ export async function restoreCache(
     }
   }

-  core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
-
   return cacheEntry.cacheKey
 }

@@ -120,7 +120,7 @@ export async function restoreCache(
  * @param paths a list of file paths to be cached
  * @param key an explicit key for restoring the cache
  * @param options cache upload options
- * @returns number returns cacheId if the cache was saved successfully
+ * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 export async function saveCache(
   paths: string[],
@@ -158,7 +158,7 @@ export async function saveCache(
   await createTar(archiveFolder, cachePaths, compressionMethod)

   const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
-  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
+  const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath)
   core.debug(`File Size: ${archiveFileSize}`)
   if (archiveFileSize > fileSizeLimit) {
     throw new Error(
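The limit is expressed in bytes, so the comparison in this hunk is against 5 × 1024³ bytes, and the ~6 GB fixture in the save test earlier in this diff deliberately exceeds it. The byte math, spelled out:

```ts
const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5368709120 bytes (5 GB repo limit)
const testCacheSize = 6 * 1024 * 1024 * 1024 // 6442450944 bytes (~6 GB), as mocked in the test
console.log(testCacheSize > fileSizeLimit) // true, so saveCache throws
```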
@@ -242,7 +242,7 @@ export async function downloadCache(

   if (contentLengthHeader) {
     const expectedLength = parseInt(contentLengthHeader)
-    const actualLength = utils.getArchiveFileSize(archivePath)
+    const actualLength = utils.getArchiveFileSizeIsBytes(archivePath)

     if (actualLength !== expectedLength) {
       throw new Error(
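This hunk shows the download path comparing the bytes actually written to disk against the response's `Content-Length` header. A minimal standalone sketch of that validation step, assuming the header value is passed in as a string (the surrounding HTTP client code and exact error message are not shown in this diff):

```ts
import * as fs from 'fs'

// Sketch: fail fast if a downloaded archive is truncated.
function assertDownloadComplete(archivePath: string, contentLengthHeader: string): void {
  const expectedLength = parseInt(contentLengthHeader)
  const actualLength = fs.statSync(archivePath).size // same check getArchiveFileSizeIsBytes performs

  if (actualLength !== expectedLength) {
    throw new Error(
      `Incomplete download: expected ${expectedLength} bytes, got ${actualLength} bytes`
    )
  }
}
```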
@@ -399,7 +399,7 @@ export async function saveCache(

   // Commit Cache
   core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSize(archivePath)
+  const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath)
   const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
   if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
     throw new Error(
@@ -34,7 +34,7 @@ export async function createTempDirectory(): Promise<string> {
   return dest
 }

-export function getArchiveFileSize(filePath: string): number {
+export function getArchiveFileSizeIsBytes(filePath: string): number {
   return fs.statSync(filePath).size
 }

@@ -80,6 +80,7 @@ async function getVersion(app: string): Promise<string> {
   return versionOutput
 }

+// Use zstandard if possible to maximize cache performance
 export async function getCompressionMethod(): Promise<CompressionMethod> {
   const versionOutput = await getVersion('zstd')
   return versionOutput.toLowerCase().includes('zstd command line interface')
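The added comment documents the selection rule: prefer zstd when its CLI is available, otherwise fall back to gzip. A rough standalone sketch of that check, using Node's `child_process` in place of the package's own `getVersion` helper (whose implementation is not shown in this diff):

```ts
import {execSync} from 'child_process'

type CompressionMethod = 'gzip' | 'zstd'

function detectCompressionMethod(): CompressionMethod {
  let versionOutput = ''
  try {
    // `zstd --version` prints a banner containing "zstd command line interface".
    versionOutput = execSync('zstd --version').toString()
  } catch {
    // zstd is not installed or not on PATH; fall through to gzip.
  }
  return versionOutput.toLowerCase().includes('zstd command line interface')
    ? 'zstd'
    : 'gzip'
}

console.log(detectCompressionMethod())
```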
@@ -9,7 +9,7 @@ export interface UploadOptions {
    */
   uploadConcurrency?: number
   /**
-   * Maximum chunk size for cache upload
+   * Maximum chunk size in bytes for cache upload
    *
    * @default 32MB
    */
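The doc fix clarifies that the chunk-size option is measured in bytes, with a 32 MB default. A hedged usage sketch of passing upload options to `saveCache`; only `uploadConcurrency` is visible in this hunk, so `uploadChunkSize` is an assumed name for the property this comment documents:

```ts
import * as cache from '@actions/cache'

async function saveWithOptions(): Promise<void> {
  const cacheId = await cache.saveCache(['node_modules'], 'npm-foobar-d5ea0750', {
    uploadConcurrency: 4, // number of parallel uploads
    uploadChunkSize: 32 * 1024 * 1024 // assumed property name; 32 MB expressed in bytes
  })
  console.log(`Saved cache with id ${cacheId}`)
}
```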