mirror of https://github.com/actions/toolkit

commit 7409ad5fae (parent 932779cf58)

Change variable path to a list
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ $ npm install @actions/artifact --save
 Provides functions to interact with actions cache. Read more [here](packages/cache)
 
 ```bash
-$ npm install @actions/artifact --save
+$ npm install @actions/cache --save
 ```
 <br/>
 
--- a/packages/cache/README.md
+++ b/packages/cache/README.md
@@ -1 +1,13 @@
 # `@actions/cache`
+
+> Functions necessary for caching dependencies and build outputs to improve workflow execution time.
+
+## Usage
+
+#### Restore Cache
+
+#### Save Cache
+
+## Additional Documentation
+
+See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
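The Usage sections above are left as stubs in this commit. A minimal sketch of the list-based API they will eventually document, assuming only the signatures introduced here; the key names and paths are illustrative, not from the commit:

```typescript
import {restoreCache, saveCache} from '@actions/cache'

async function example(): Promise<void> {
  // paths is now a list of files/directories, not a newline-delimited string
  const paths = ['node_modules']
  const key = 'npm-linux-d5ea0750' // illustrative key
  const restoreKeys = ['npm-linux-', 'npm-']

  // resolves to the key of the cache hit, or undefined on a miss
  const hitKey = await restoreCache(paths, key, restoreKeys)

  // resolves to the numeric cache ID once the archive is uploaded
  const cacheId = await saveCache(paths, key)
  console.log(hitKey, cacheId)
}
```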
--- a/packages/cache/__tests__/cacheHttpClient.test.ts
+++ b/packages/cache/__tests__/cacheHttpClient.test.ts
@@ -1,17 +1,25 @@
 import {getCacheVersion} from '../src/internal/cacheHttpClient'
 import {CompressionMethod} from '../src/internal/constants'
 
-test('getCacheVersion with path input and compression method undefined returns version', async () => {
-  const inputPath = 'node_modules'
-  const result = getCacheVersion(inputPath)
+test('getCacheVersion with one path returns version', async () => {
+  const paths = ['node_modules']
+  const result = getCacheVersion(paths)
   expect(result).toEqual(
     'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
   )
 })
 
+test('getCacheVersion with multiple paths returns version', async () => {
+  const paths = ['node_modules', 'dist']
+  const result = getCacheVersion(paths)
+  expect(result).toEqual(
+    '165c3053bc646bf0d4fac17b1f5731caca6fe38e0e464715c0c3c6b6318bf436'
+  )
+})
+
 test('getCacheVersion with zstd compression returns version', async () => {
-  const inputPath = 'node_modules'
-  const result = getCacheVersion(inputPath, CompressionMethod.Zstd)
+  const paths = ['node_modules']
+  const result = getCacheVersion(paths, CompressionMethod.Zstd)
 
   expect(result).toEqual(
     '273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24'
@@ -19,8 +27,8 @@ test('getCacheVersion with zstd compression returns version', async () => {
 })
 
 test('getCacheVersion with gzip compression does not change vesion', async () => {
-  const inputPath = 'node_modules'
-  const result = getCacheVersion(inputPath, CompressionMethod.Gzip)
+  const paths = ['node_modules']
+  const result = getCacheVersion(paths, CompressionMethod.Gzip)
 
   expect(result).toEqual(
     'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
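The pinned hashes above suggest the cache version is a digest over the path list. A sketch of that computation, assuming (this part is outside the hunk shown) that the components are joined with '|' and hashed with SHA-256; the function below is a hypothetical re-implementation for illustration only:

```typescript
import * as crypto from 'crypto'

// Assumed shape of the version computation; the real one lives in
// src/internal/cacheHttpClient.ts and only its signature appears above.
function cacheVersion(paths: string[], compressionMethod?: string): string {
  const components = paths.concat(
    compressionMethod === 'zstd' ? [compressionMethod] : [] // gzip is ignored
  )
  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}

// If the assumptions hold, these reproduce the hashes pinned in the tests:
console.log(cacheVersion(['node_modules']))
console.log(cacheVersion(['node_modules', 'dist']))
```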
--- a/packages/cache/__tests__/cacheUtils.test.ts
+++ b/packages/cache/__tests__/cacheUtils.test.ts
@@ -1,9 +1,6 @@
-import * as core from '@actions/core'
 import * as io from '@actions/io'
 import {promises as fs} from 'fs'
-import * as os from 'os'
 import * as path from 'path'
-import {v4 as uuidV4} from 'uuid'
 import * as cacheUtils from '../src/internal/cacheUtils'
 
 jest.mock('@actions/core')
@@ -26,143 +23,6 @@ test('getArchiveFileSize returns file size', () => {
   expect(size).toBe(11)
 })
 
-test('logWarning logs a message with a warning prefix', () => {
-  const message = 'A warning occurred.'
-
-  const infoMock = jest.spyOn(core, 'info')
-
-  cacheUtils.logWarning(message)
-
-  expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`)
-})
-
-test('resolvePaths with no ~ in path', async () => {
-  const filePath = '.cache'
-
-  // Create the following layout:
-  //   cwd
-  //   cwd/.cache
-  //   cwd/.cache/file.txt
-
-  const root = path.join(getTempDir(), 'no-tilde')
-  // tarball entries will be relative to workspace
-  process.env['GITHUB_WORKSPACE'] = root
-
-  await fs.mkdir(root, {recursive: true})
-  const cache = path.join(root, '.cache')
-  await fs.mkdir(cache, {recursive: true})
-  await fs.writeFile(path.join(cache, 'file.txt'), 'cached')
-
-  const originalCwd = process.cwd()
-
-  try {
-    process.chdir(root)
-
-    const resolvedPath = await cacheUtils.resolvePaths([filePath])
-
-    const expectedPath = [filePath]
-    expect(resolvedPath).toStrictEqual(expectedPath)
-  } finally {
-    process.chdir(originalCwd)
-  }
-})
-
-test('resolvePaths with ~ in path', async () => {
-  const cacheDir = uuidV4()
-  const filePath = `~/${cacheDir}`
-  // Create the following layout:
-  //   ~/uuid
-  //   ~/uuid/file.txt
-
-  const homedir = jest.requireActual('os').homedir()
-  const homedirMock = jest.spyOn(os, 'homedir')
-  homedirMock.mockReturnValue(homedir)
-
-  const target = path.join(homedir, cacheDir)
-  await fs.mkdir(target, {recursive: true})
-  await fs.writeFile(path.join(target, 'file.txt'), 'cached')
-
-  const root = getTempDir()
-  process.env['GITHUB_WORKSPACE'] = root
-
-  try {
-    const resolvedPath = await cacheUtils.resolvePaths([filePath])
-
-    const expectedPath = [path.relative(root, target)]
-    expect(resolvedPath).toStrictEqual(expectedPath)
-  } finally {
-    await io.rmRF(target)
-  }
-})
-
-test('resolvePaths with home not found', async () => {
-  const filePath = '~/.cache/yarn'
-  const homedirMock = jest.spyOn(os, 'homedir')
-  homedirMock.mockReturnValue('')
-
-  await expect(cacheUtils.resolvePaths([filePath])).rejects.toThrow(
-    'Unable to determine HOME directory'
-  )
-})
-
-test('resolvePaths inclusion pattern returns found', async () => {
-  const pattern = '*.ts'
-  // Create the following layout:
-  //   inclusion-patterns
-  //   inclusion-patterns/miss.txt
-  //   inclusion-patterns/test.ts
-
-  const root = path.join(getTempDir(), 'inclusion-patterns')
-  // tarball entries will be relative to workspace
-  process.env['GITHUB_WORKSPACE'] = root
-
-  await fs.mkdir(root, {recursive: true})
-  await fs.writeFile(path.join(root, 'miss.txt'), 'no match')
-  await fs.writeFile(path.join(root, 'test.ts'), 'match')
-
-  const originalCwd = process.cwd()
-
-  try {
-    process.chdir(root)
-
-    const resolvedPath = await cacheUtils.resolvePaths([pattern])
-
-    const expectedPath = ['test.ts']
-    expect(resolvedPath).toStrictEqual(expectedPath)
-  } finally {
-    process.chdir(originalCwd)
-  }
-})
-
-test('resolvePaths exclusion pattern returns not found', async () => {
-  const patterns = ['*.ts', '!test.ts']
-  // Create the following layout:
-  //   exclusion-patterns
-  //   exclusion-patterns/miss.txt
-  //   exclusion-patterns/test.ts
-
-  const root = path.join(getTempDir(), 'exclusion-patterns')
-  // tarball entries will be relative to workspace
-  process.env['GITHUB_WORKSPACE'] = root
-
-  await fs.mkdir(root, {recursive: true})
-  await fs.writeFile(path.join(root, 'miss.txt'), 'no match')
-  await fs.writeFile(path.join(root, 'test.ts'), 'no match')
-
-  const originalCwd = process.cwd()
-
-  try {
-    process.chdir(root)
-
-    const resolvedPath = await cacheUtils.resolvePaths(patterns)
-
-    const expectedPath: string[] = []
-    expect(resolvedPath).toStrictEqual(expectedPath)
-  } finally {
-    process.chdir(originalCwd)
-  }
-})
-
 test('unlinkFile unlinks file', async () => {
   const testDirectory = await fs.mkdtemp('unlinkFileTest')
   const testFile = path.join(testDirectory, 'test.txt')
--- a/packages/cache/__tests__/restore.test.ts
+++ b/packages/cache/__tests__/restore.test.ts
@@ -20,112 +20,95 @@ beforeAll(() => {
 })
 
 test('restore with no path should fail', async () => {
-  const inputPath = ''
+  const paths: string[] = []
   const key = 'node-test'
-  const failedMock = jest.spyOn(core, 'setFailed')
-  await restoreCache(inputPath, key)
-  expect(failedMock).toHaveBeenCalledWith(
-    'Input required and not supplied: path'
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Path Validation Error: At least one directory or file path is required`
   )
 })
 
 test('restore with too many keys should fail', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
   const restoreKeys = [...Array(20).keys()].map(x => x.toString())
-  const failedMock = jest.spyOn(core, 'setFailed')
-  await restoreCache(inputPath, key, restoreKeys)
-  expect(failedMock).toHaveBeenCalledWith(
+  await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
     `Key Validation Error: Keys are limited to a maximum of 10.`
   )
 })
 
 test('restore with large key should fail', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'foo'.repeat(512) // Over the 512 character limit
-  const failedMock = jest.spyOn(core, 'setFailed')
-  await restoreCache(inputPath, key)
-  expect(failedMock).toHaveBeenCalledWith(
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
     `Key Validation Error: ${key} cannot be larger than 512 characters.`
   )
 })
 
 test('restore with invalid key should fail', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'comma,comma'
-  const failedMock = jest.spyOn(core, 'setFailed')
-  await restoreCache(inputPath, key)
-  expect(failedMock).toHaveBeenCalledWith(
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
     `Key Validation Error: ${key} cannot contain commas.`
   )
 })
 
 test('restore with no cache found', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
 
   const infoMock = jest.spyOn(core, 'info')
-  const failedMock = jest.spyOn(core, 'setFailed')
 
   const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
   clientMock.mockImplementation(async () => {
     return Promise.resolve(null)
   })
 
-  await restoreCache(inputPath, key)
+  const cacheKey = await restoreCache(paths, key)
 
-  expect(failedMock).toHaveBeenCalledTimes(0)
+  expect(cacheKey).toBe(undefined)
   expect(infoMock).toHaveBeenCalledWith(
     `Cache not found for input keys: ${key}`
   )
 })
 
 test('restore with server error should fail', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
 
-  const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
-  const failedMock = jest.spyOn(core, 'setFailed')
-
   const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
   clientMock.mockImplementation(() => {
     throw new Error('HTTP Error Occurred')
   })
 
-  await restoreCache(inputPath, key)
-
-  expect(logWarningMock).toHaveBeenCalledTimes(1)
-  expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred')
-  expect(failedMock).toHaveBeenCalledTimes(0)
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    'HTTP Error Occurred'
+  )
 })
 
 test('restore with restore keys and no cache found', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
   const restoreKey = 'node-'
 
   const infoMock = jest.spyOn(core, 'info')
-  const failedMock = jest.spyOn(core, 'setFailed')
 
   const clientMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
   clientMock.mockImplementation(async () => {
     return Promise.resolve(null)
   })
 
-  await restoreCache(inputPath, key, [restoreKey])
+  const cacheKey = await restoreCache(paths, key, [restoreKey])
 
-  expect(failedMock).toHaveBeenCalledTimes(0)
+  expect(cacheKey).toBe(undefined)
   expect(infoMock).toHaveBeenCalledWith(
     `Cache not found for input keys: ${key}, ${restoreKey}`
   )
 })
 
 test('restore with gzip compressed cache found', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
 
   const infoMock = jest.spyOn(core, 'info')
-  const failedMock = jest.spyOn(core, 'setFailed')
 
   const cacheEntry: ArtifactCacheEntry = {
     cacheKey: key,
@@ -160,9 +143,10 @@ test('restore with gzip compressed cache found', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValue(Promise.resolve(compression))
 
-  await restoreCache(inputPath, key)
+  const cacheKey = await restoreCache(paths, key)
 
-  expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, {
+  expect(cacheKey).toBe(key)
+  expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
     compressionMethod: compression
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -179,16 +163,14 @@ test('restore with gzip compressed cache found', async () => {
   expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
 
   expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
 test('restore with a pull request event and zstd compressed cache found', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
 
   const infoMock = jest.spyOn(core, 'info')
-  const failedMock = jest.spyOn(core, 'setFailed')
 
   const cacheEntry: ArtifactCacheEntry = {
     cacheKey: key,
@@ -220,9 +202,10 @@ test('restore with a pull request event and zstd compressed cache found', async
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValue(Promise.resolve(compression))
 
-  await restoreCache(inputPath, key)
+  const cacheKey = await restoreCache(paths, key)
 
-  expect(getCacheMock).toHaveBeenCalledWith([key], inputPath, {
+  expect(cacheKey).toBe(key)
+  expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
     compressionMethod: compression
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -237,17 +220,15 @@ test('restore with a pull request event and zstd compressed cache found', async
   expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
 
   expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`)
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
 test('restore with cache found for restore key', async () => {
-  const inputPath = 'node_modules'
+  const paths = ['node_modules']
   const key = 'node-test'
   const restoreKey = 'node-'
 
   const infoMock = jest.spyOn(core, 'info')
-  const failedMock = jest.spyOn(core, 'setFailed')
 
   const cacheEntry: ArtifactCacheEntry = {
     cacheKey: restoreKey,
@@ -279,9 +260,10 @@ test('restore with cache found for restore key', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValue(Promise.resolve(compression))
 
-  await restoreCache(inputPath, key, [restoreKey])
+  const cacheKey = await restoreCache(paths, key, [restoreKey])
 
-  expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], inputPath, {
+  expect(cacheKey).toBe(restoreKey)
+  expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, {
     compressionMethod: compression
   })
   expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
@@ -298,6 +280,5 @@ test('restore with cache found for restore key', async () => {
   expect(infoMock).toHaveBeenCalledWith(
     `Cache restored from key: ${restoreKey}`
   )
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
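The restore tests now assert rejected promises rather than core.setFailed calls, so error handling moves to the caller. A sketch of how a consuming action might handle the new failure mode; the run wrapper and logging are illustrative, not part of this commit:

```typescript
import {restoreCache} from '@actions/cache'

async function run(): Promise<void> {
  try {
    // undefined means a clean miss; validation and server errors now
    // reject instead of being swallowed and reported via core.setFailed
    const hitKey = await restoreCache(['node_modules'], 'node-test', ['node-'])
    if (hitKey === undefined) {
      console.log('Cache miss, continuing without a restore')
    }
  } catch (error) {
    console.log(`Restore failed: ${(error as Error).message}`)
  }
}
```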
--- a/packages/cache/__tests__/save.test.ts
+++ b/packages/cache/__tests__/save.test.ts
@@ -1,4 +1,3 @@
-import * as core from '@actions/core'
 import * as path from 'path'
 import {saveCache} from '../src/cache'
 import * as cacheHttpClient from '../src/internal/cacheHttpClient'
@@ -27,42 +26,31 @@ beforeAll(() => {
   })
 })
 
-test('save with missing input outputs warning', async () => {
-  const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
-  const failedMock = jest.spyOn(core, 'setFailed')
-
-  const inputPath = ''
+test('save with missing input should fail', async () => {
+  const paths: string[] = []
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-
-  await saveCache(inputPath, primaryKey)
-
-  expect(logWarningMock).toHaveBeenCalledWith(
-    'Input required and not supplied: path'
+  await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
+    `Path Validation Error: At least one directory or file path is required`
   )
-  expect(logWarningMock).toHaveBeenCalledTimes(1)
-  expect(failedMock).toHaveBeenCalledTimes(0)
 })
 
-test('save with large cache outputs warning', async () => {
-  const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
-  const failedMock = jest.spyOn(core, 'setFailed')
-
-  const inputPath = 'node_modules'
+test('save with large cache outputs should fail', async () => {
+  const filePath = 'node_modules'
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const cachePaths = [path.resolve(inputPath)]
+  const cachePaths = [path.resolve(filePath)]
 
   const createTarMock = jest.spyOn(tar, 'createTar')
 
   const cacheSize = 6 * 1024 * 1024 * 1024 //~6GB, over the 5GB limit
-  jest.spyOn(cacheUtils, 'getArchiveFileSize').mockImplementationOnce(() => {
-    return cacheSize
-  })
+  jest.spyOn(cacheUtils, 'getArchiveFileSize').mockReturnValue(cacheSize)
   const compression = CompressionMethod.Gzip
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
-    .mockReturnValue(Promise.resolve(compression))
+    .mockReturnValueOnce(Promise.resolve(compression))
 
-  await saveCache(inputPath, primaryKey)
+  await expect(saveCache([filePath], primaryKey)).rejects.toThrowError(
+    'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.'
+  )
 
   const archiveFolder = '/foo/bar'
 
@@ -72,20 +60,11 @@ test('save with large cache outputs warning', async () => {
     cachePaths,
     compression
   )
-  expect(logWarningMock).toHaveBeenCalledTimes(1)
-  expect(logWarningMock).toHaveBeenCalledWith(
-    'Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache.'
-  )
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
-test('save with reserve cache failure outputs warning', async () => {
-  const infoMock = jest.spyOn(core, 'info')
-  const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
-  const failedMock = jest.spyOn(core, 'setFailed')
-
-  const inputPath = 'node_modules'
+test('save with reserve cache failure should fail', async () => {
+  const paths = ['node_modules']
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
 
   const reserveCacheMock = jest
@@ -99,33 +78,24 @@ test('save with reserve cache failure outputs warning', async () => {
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
-    .mockReturnValue(Promise.resolve(compression))
+    .mockReturnValueOnce(Promise.resolve(compression))
 
-  await saveCache(inputPath, primaryKey)
-
-  expect(reserveCacheMock).toHaveBeenCalledTimes(1)
-  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
-    compressionMethod: compression
-  })
-
-  expect(infoMock).toHaveBeenCalledWith(
+  await expect(saveCache(paths, primaryKey)).rejects.toThrowError(
     `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
   )
+  expect(reserveCacheMock).toHaveBeenCalledTimes(1)
+  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, paths, {
+    compressionMethod: compression
+  })
   expect(createTarMock).toHaveBeenCalledTimes(0)
   expect(saveCacheMock).toHaveBeenCalledTimes(0)
-  expect(logWarningMock).toHaveBeenCalledTimes(0)
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
-test('save with server error outputs warning', async () => {
-  const logWarningMock = jest.spyOn(cacheUtils, 'logWarning')
-  const failedMock = jest.spyOn(core, 'setFailed')
-
-  const inputPath = 'node_modules'
+test('save with server error should fail', async () => {
+  const filePath = 'node_modules'
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const cachePaths = [path.resolve(inputPath)]
+  const cachePaths = [path.resolve(filePath)]
 
   const cacheId = 4
   const reserveCacheMock = jest
@@ -144,12 +114,13 @@ test('save with server error outputs warning', async () => {
   const compression = CompressionMethod.Zstd
   const getCompressionMock = jest
     .spyOn(cacheUtils, 'getCompressionMethod')
-    .mockReturnValue(Promise.resolve(compression))
+    .mockReturnValueOnce(Promise.resolve(compression))
 
-  await saveCache(inputPath, primaryKey)
-
+  await expect(await saveCache([filePath], primaryKey)).rejects.toThrowError(
+    'HTTP Error Occurred'
+  )
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
-  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
     compressionMethod: compression
   })
 
@@ -165,20 +136,13 @@ test('save with server error outputs warning', async () => {
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
   expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
-
-  expect(logWarningMock).toHaveBeenCalledTimes(1)
-  expect(logWarningMock).toHaveBeenCalledWith('HTTP Error Occurred')
-
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
 test('save with valid inputs uploads a cache', async () => {
-  const failedMock = jest.spyOn(core, 'setFailed')
-
-  const inputPath = 'node_modules'
+  const filePath = 'node_modules'
   const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
-  const cachePaths = [path.resolve(inputPath)]
+  const cachePaths = [path.resolve(filePath)]
 
   const cacheId = 4
   const reserveCacheMock = jest
@@ -194,10 +158,10 @@ test('save with valid inputs uploads a cache', async () => {
     .spyOn(cacheUtils, 'getCompressionMethod')
     .mockReturnValue(Promise.resolve(compression))
 
-  await saveCache(inputPath, primaryKey)
+  await saveCache([filePath], primaryKey)
 
   expect(reserveCacheMock).toHaveBeenCalledTimes(1)
-  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, inputPath, {
+  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
     compressionMethod: compression
   })
 
@@ -213,7 +177,5 @@ test('save with valid inputs uploads a cache', async () => {
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
   expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
-
-  expect(failedMock).toHaveBeenCalledTimes(0)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
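saveCache likewise rejects on validation errors, reservation conflicts and the 5GB size limit, as the tests above pin down. A sketch of a caller that treats those as non-fatal; the helper name is hypothetical:

```typescript
import {saveCache} from '@actions/cache'

// Hypothetical helper: returns the cache ID, or undefined when saving
// is not possible (key already being reserved, archive over the limit).
async function trySaveCache(
  paths: string[],
  key: string
): Promise<number | undefined> {
  try {
    return await saveCache(paths, key)
  } catch (error) {
    console.log(`Save skipped: ${(error as Error).message}`)
    return undefined
  }
}
```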
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -1,169 +1,153 @@
 import * as core from '@actions/core'
-import * as pathUtils from 'path'
+import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar} from './internal/tar'
 
+function checkPaths(paths: string[]): void {
+  if (!paths || paths.length === 0) {
+    throw new Error(
+      `Path Validation Error: At least one directory or file path is required`
+    )
+  }
+}
+
+function checkKey(key: string): void {
+  if (key.length > 512) {
+    throw new Error(
+      `Key Validation Error: ${key} cannot be larger than 512 characters.`
+    )
+  }
+  const regex = /^[^,]*$/
+  if (!regex.test(key)) {
+    throw new Error(`Key Validation Error: ${key} cannot contain commas.`)
+  }
+}
+
 /**
  * Restores cache from keys
  *
- * @param path a string representing files that were cached
+ * @param paths a list of file paths to restore from the cache
  * @param primaryKey an explicit key for restoring the cache
  * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
  * @returns string returns the key for the cache hit, otherwise return undefined
  */
 export async function restoreCache(
-  path: string,
+  paths: string[],
   primaryKey: string,
   restoreKeys?: string[]
 ): Promise<string | undefined> {
-  try {
-    if (!path || path.length === 0) {
-      throw new Error('Input required and not supplied: path')
-    }
+  checkPaths(paths)
 
-    restoreKeys = restoreKeys || []
-    const keys = [primaryKey, ...restoreKeys]
+  restoreKeys = restoreKeys || []
+  const keys = [primaryKey, ...restoreKeys]
 
-    core.debug('Resolved Keys:')
-    core.debug(JSON.stringify(keys))
+  core.debug('Resolved Keys:')
+  core.debug(JSON.stringify(keys))
 
-    if (keys.length > 10) {
-      core.setFailed(
-        `Key Validation Error: Keys are limited to a maximum of 10.`
-      )
-      return undefined
-    }
-    for (const key of keys) {
-      if (key.length > 512) {
-        core.setFailed(
-          `Key Validation Error: ${key} cannot be larger than 512 characters.`
-        )
-        return undefined
-      }
-      const regex = /^[^,]*$/
-      if (!regex.test(key)) {
-        core.setFailed(`Key Validation Error: ${key} cannot contain commas.`)
-        return undefined
-      }
-    }
+  if (keys.length > 10) {
+    throw new Error(
+      `Key Validation Error: Keys are limited to a maximum of 10.`
+    )
+  }
+  for (const key of keys) {
+    checkKey(key)
+  }
 
-    const compressionMethod = await utils.getCompressionMethod()
+  const compressionMethod = await utils.getCompressionMethod()
 
-    try {
-      // path are needed to compute version
-      const cacheEntry = await cacheHttpClient.getCacheEntry(keys, path, {
-        compressionMethod
-      })
-      if (!cacheEntry?.archiveLocation) {
-        core.info(`Cache not found for input keys: ${keys.join(', ')}`)
-        return undefined
-      }
+  // path are needed to compute version
+  const cacheEntry = await cacheHttpClient.getCacheEntry(keys, paths, {
+    compressionMethod
+  })
+  if (!cacheEntry?.archiveLocation) {
+    core.info(`Cache not found for input keys: ${keys.join(', ')}`)
+    return undefined
+  }
 
-      const archivePath = pathUtils.join(
-        await utils.createTempDirectory(),
-        utils.getCacheFileName(compressionMethod)
-      )
-      core.debug(`Archive Path: ${archivePath}`)
+  const archivePath = path.join(
+    await utils.createTempDirectory(),
+    utils.getCacheFileName(compressionMethod)
+  )
+  core.debug(`Archive Path: ${archivePath}`)
 
-      try {
-        // Download the cache from the cache entry
-        await cacheHttpClient.downloadCache(
-          cacheEntry.archiveLocation,
-          archivePath
-        )
+  try {
+    // Download the cache from the cache entry
+    await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath)
 
-        const archiveFileSize = utils.getArchiveFileSize(archivePath)
-        core.info(
-          `Cache Size: ~${Math.round(
-            archiveFileSize / (1024 * 1024)
-          )} MB (${archiveFileSize} B)`
-        )
+    const archiveFileSize = utils.getArchiveFileSize(archivePath)
+    core.info(
+      `Cache Size: ~${Math.round(
+        archiveFileSize / (1024 * 1024)
+      )} MB (${archiveFileSize} B)`
+    )
 
-        await extractTar(archivePath, compressionMethod)
-      } finally {
-        // Try to delete the archive to save space
-        try {
-          await utils.unlinkFile(archivePath)
-        } catch (error) {
-          core.debug(`Failed to delete archive: ${error}`)
-        }
-      }
+    await extractTar(archivePath, compressionMethod)
+  } finally {
+    // Try to delete the archive to save space
+    try {
+      await utils.unlinkFile(archivePath)
+    } catch (error) {
+      core.debug(`Failed to delete archive: ${error}`)
+    }
+  }
 
-      core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
+  core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`)
 
-      return cacheEntry.cacheKey
-    } catch (error) {
-      utils.logWarning(error.message)
-      return undefined
-    }
-  } catch (error) {
-    core.setFailed(error.message)
-    return undefined
-  }
+  return cacheEntry.cacheKey
 }
 
 /**
- * Saves a file with the specified key
+ * Saves a list of files with the specified key
  *
- * @param path a string representing files to be cached
+ * @param paths a list of file paths to be cached
  * @param key an explicit key for restoring the cache
- * @returns number returns cacheId if the cache was saved successfully, otherwise return -1
+ * @returns number returns cacheId if the cache was saved successfully
  */
-export async function saveCache(path: string, key: string): Promise<number> {
-  try {
-    if (!path || path.length === 0) {
-      throw new Error('Input required and not supplied: path')
-    }
+export async function saveCache(paths: string[], key: string): Promise<number> {
+  checkPaths(paths)
+  checkKey(key)
 
-    const compressionMethod = await utils.getCompressionMethod()
+  const compressionMethod = await utils.getCompressionMethod()
 
-    core.debug('Reserving Cache')
-    const cacheId = await cacheHttpClient.reserveCache(key, path, {
-      compressionMethod
-    })
-    if (cacheId === -1) {
-      core.info(
-        `Unable to reserve cache with key ${key}, another job may be creating this cache.`
-      )
-      return -1
-    }
-    core.debug(`Cache ID: ${cacheId}`)
-    const cachePaths = await utils.resolvePaths(
-      path.split('\n').filter(x => x !== '')
-    )
+  core.debug('Reserving Cache')
+  const cacheId = await cacheHttpClient.reserveCache(key, paths, {
+    compressionMethod
+  })
+  if (cacheId === -1) {
+    throw new Error(
+      `Unable to reserve cache with key ${key}, another job may be creating this cache.`
+    )
+  }
+  core.debug(`Cache ID: ${cacheId}`)
+  const cachePaths = await utils.resolvePaths(paths)
 
-    core.debug('Cache Paths:')
-    core.debug(`${JSON.stringify(cachePaths)}`)
+  core.debug('Cache Paths:')
+  core.debug(`${JSON.stringify(cachePaths)}`)
 
-    const archiveFolder = await utils.createTempDirectory()
-    const archivePath = pathUtils.join(
-      archiveFolder,
-      utils.getCacheFileName(compressionMethod)
-    )
+  const archiveFolder = await utils.createTempDirectory()
+  const archivePath = path.join(
+    archiveFolder,
+    utils.getCacheFileName(compressionMethod)
+  )
 
-    core.debug(`Archive Path: ${archivePath}`)
+  core.debug(`Archive Path: ${archivePath}`)
 
-    await createTar(archiveFolder, cachePaths, compressionMethod)
+  await createTar(archiveFolder, cachePaths, compressionMethod)
 
-    const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
-    const archiveFileSize = utils.getArchiveFileSize(archivePath)
-    core.debug(`File Size: ${archiveFileSize}`)
-    if (archiveFileSize > fileSizeLimit) {
-      utils.logWarning(
-        `Cache size of ~${Math.round(
-          archiveFileSize / (1024 * 1024)
-        )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
-      )
-      return -1
-    }
+  const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit
+  const archiveFileSize = utils.getArchiveFileSize(archivePath)
+  core.debug(`File Size: ${archiveFileSize}`)
+  if (archiveFileSize > fileSizeLimit) {
+    throw new Error(
+      `Cache size of ~${Math.round(
+        archiveFileSize / (1024 * 1024)
+      )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
+    )
+  }
 
-    core.debug(`Saving Cache (ID: ${cacheId})`)
-    await cacheHttpClient.saveCache(cacheId, archivePath)
+  core.debug(`Saving Cache (ID: ${cacheId})`)
+  await cacheHttpClient.saveCache(cacheId, archivePath)
 
-    return cacheId
-  } catch (error) {
-    utils.logWarning(error.message)
-    return -1
-  }
+  return cacheId
 }
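With checkPaths and checkKey factored out above, both entry points share the same validation and surface failures as rejections. Two illustrative calls; the argument values are examples, not from the commit:

```typescript
import {restoreCache, saveCache} from './cache'

// Rejects with: Path Validation Error: At least one directory or file path is required
restoreCache([], 'node-test').catch((e: Error) => console.log(e.message))

// Rejects with: Key Validation Error: a,b cannot contain commas.
saveCache(['node_modules'], 'a,b').catch((e: Error) => console.log(e.message))
```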
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -84,10 +84,10 @@ function createHttpClient(): HttpClient {
 }
 
 export function getCacheVersion(
-  inputPath: string,
+  paths: string[],
   compressionMethod?: CompressionMethod
 ): string {
-  const components = [inputPath].concat(
+  const components = paths.concat(
     compressionMethod === CompressionMethod.Zstd ? [compressionMethod] : []
   )
 
@@ -102,11 +102,11 @@ export function getCacheVersion(
 
 export async function getCacheEntry(
   keys: string[],
-  inputPath: string,
+  paths: string[],
   options?: CacheOptions
 ): Promise<ArtifactCacheEntry | null> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(inputPath, options?.compressionMethod)
+  const version = getCacheVersion(paths, options?.compressionMethod)
   const resource = `cache?keys=${encodeURIComponent(
     keys.join(',')
   )}&version=${version}`
@@ -177,11 +177,11 @@ export async function downloadCache(
 // Reserve Cache
 export async function reserveCache(
   key: string,
-  inputPath: string,
+  paths: string[],
   options?: CacheOptions
 ): Promise<number> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(inputPath, options?.compressionMethod)
+  const version = getCacheVersion(paths, options?.compressionMethod)
 
   const reserveCacheRequest: ReserveCacheRequest = {
     key,
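getCacheEntry and reserveCache both derive the version from the same paths list, so a save and a later restore agree on the version for a given set of paths. A sketch of the lookup resource built from the lines above; the key and path values are illustrative:

```typescript
import {getCacheVersion} from './cacheHttpClient'
import {CompressionMethod} from './constants'

// Mirrors the resource construction shown in the hunk above:
// comma-joined keys (most specific first) plus the paths-derived version.
const keys = ['node-test', 'node-']
const version = getCacheVersion(['node_modules'], CompressionMethod.Zstd)
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`
console.log(resource)
```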
--- a/packages/cache/src/internal/cacheUtils.ts
+++ b/packages/cache/src/internal/cacheUtils.ts
@@ -38,11 +38,6 @@ export function getArchiveFileSize(filePath: string): number {
   return fs.statSync(filePath).size
 }
 
-export function logWarning(message: string): void {
-  const warningPrefix = '[warning]'
-  core.info(`${warningPrefix}${message}`)
-}
-
 export async function resolvePaths(patterns: string[]): Promise<string[]> {
   const paths: string[] = []
   const workspace = process.env['GITHUB_WORKSPACE'] ?? process.cwd()
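resolvePaths, shown as context above, expands the caller's patterns relative to GITHUB_WORKSPACE, falling back to the current directory. A hedged usage sketch; the patterns are illustrative:

```typescript
import * as cacheUtils from './cacheUtils'

async function listCachedPaths(): Promise<void> {
  // Entries come back relative to the workspace, which is the behavior
  // the resolvePaths tests removed in this commit exercised.
  const resolved = await cacheUtils.resolvePaths(['node_modules', '!**/*.log'])
  console.log(resolved)
}
```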