mirror of https://github.com/actions/toolkit
commit 928d3e806d
@@ -0,0 +1,442 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as tar from '../src/internal/tar'
import * as config from '../src/internal/config'
import * as cacheUtils from '../src/internal/cacheUtils'
import * as downloadCacheModule from '../src/internal/blob/download-cache'
import {restoreCache} from '../src/cache'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
import {BlobDownloadResponseParsed} from '@azure/storage-blob'

jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/config')
jest.mock('../src/internal/tar')

let logDebugMock: jest.SpyInstance
let logInfoMock: jest.SpyInstance

beforeAll(() => {
  jest.spyOn(console, 'log').mockImplementation(() => {})
  jest.spyOn(core, 'debug').mockImplementation(() => {})
  jest.spyOn(core, 'info').mockImplementation(() => {})
  jest.spyOn(core, 'warning').mockImplementation(() => {})
  jest.spyOn(core, 'error').mockImplementation(() => {})

  jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
    const actualUtils = jest.requireActual('../src/internal/cacheUtils')
    return actualUtils.getCacheFileName(cm)
  })

  // Ensure that we're using v2 for these tests
  jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')

  logDebugMock = jest.spyOn(core, 'debug')
  logInfoMock = jest.spyOn(core, 'info')
})

afterEach(() => {
  expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
})

test('restore with no path should fail', async () => {
  const paths: string[] = []
  const key = 'node-test'
  await expect(restoreCache(paths, key)).rejects.toThrowError(
    `Path Validation Error: At least one directory or file path is required`
  )
})

test('restore with too many keys should fail', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const restoreKeys = [...Array(20).keys()].map(x => x.toString())
  await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
    `Key Validation Error: Keys are limited to a maximum of 10.`
  )
})

test('restore with large key should fail', async () => {
  const paths = ['node_modules']
  const key = 'foo'.repeat(512) // Over the 512 character limit
  await expect(restoreCache(paths, key)).rejects.toThrowError(
    `Key Validation Error: ${key} cannot be larger than 512 characters.`
  )
})

test('restore with invalid key should fail', async () => {
  const paths = ['node_modules']
  const key = 'comma,comma'
  await expect(restoreCache(paths, key)).rejects.toThrowError(
    `Key Validation Error: ${key} cannot contain commas.`
  )
})

test('restore with no cache found', async () => {
  const paths = ['node_modules']
  const key = 'node-test'

  jest
    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
    .mockReturnValue(
      Promise.resolve({
        ok: false,
        signedDownloadUrl: '',
        matchedKey: ''
      })
    )

  const cacheKey = await restoreCache(paths, key)

  expect(cacheKey).toBe(undefined)
})

test('restore with server error should fail', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const logWarningMock = jest.spyOn(core, 'warning')

  jest
    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
    .mockImplementation(() => {
      throw new Error('HTTP Error Occurred')
    })

  const cacheKey = await restoreCache(paths, key)
  expect(cacheKey).toBe(undefined)
  expect(logWarningMock).toHaveBeenCalledTimes(1)
  expect(logWarningMock).toHaveBeenCalledWith(
    'Failed to restore: HTTP Error Occurred'
  )
})

test('restore with restore keys and no cache found', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const restoreKeys = ['node-']
  const logWarningMock = jest.spyOn(core, 'warning')

  jest
    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
    .mockReturnValue(
      Promise.resolve({
        ok: false,
        signedDownloadUrl: '',
        matchedKey: ''
      })
    )

  const cacheKey = await restoreCache(paths, key, restoreKeys)

  expect(cacheKey).toBe(undefined)
  expect(logWarningMock).toHaveBeenCalledWith(
    `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}`
  )
})

test('restore with gzip compressed cache found', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const compressionMethod = CompressionMethod.Gzip
  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
  const cacheVersion =
    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'

  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
  getCacheVersionMock.mockReturnValue(cacheVersion)

  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))

  const getCacheDownloadURLMock = jest.spyOn(
    CacheServiceClientJSON.prototype,
    'GetCacheEntryDownloadURL'
  )
  getCacheDownloadURLMock.mockReturnValue(
    Promise.resolve({
      ok: true,
      signedDownloadUrl,
      matchedKey: key
    })
  )

  const tempPath = '/foo/bar'

  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
  createTempDirectoryMock.mockImplementation(async () => {
    return Promise.resolve(tempPath)
  })

  const archivePath = path.join(tempPath, CacheFilename.Gzip)
  const downloadCacheFileMock = jest.spyOn(
    downloadCacheModule,
    'downloadCacheFile'
  )
  downloadCacheFileMock.mockReturnValue(
    Promise.resolve({
      _response: {
        status: 200
      }
    } as BlobDownloadResponseParsed)
  )

  const fileSize = 142
  const getArchiveFileSizeInBytesMock = jest
    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
    .mockReturnValue(fileSize)

  const extractTarMock = jest.spyOn(tar, 'extractTar')
  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')

  const cacheKey = await restoreCache(paths, key)

  expect(cacheKey).toBe(key)
  expect(getCacheVersionMock).toHaveBeenCalledWith(
    paths,
    compressionMethod,
    false
  )
  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
    key,
    restoreKeys: [],
    version: cacheVersion
  })
  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
  expect(downloadCacheFileMock).toHaveBeenCalledWith(
    signedDownloadUrl,
    archivePath
  )
  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)

  expect(extractTarMock).toHaveBeenCalledTimes(1)
  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)

  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)

  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})

test('restore with zstd compressed cache found', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const compressionMethod = CompressionMethod.Zstd
  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
  const cacheVersion =
    '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'

  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
  getCacheVersionMock.mockReturnValue(cacheVersion)

  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))

  const getCacheDownloadURLMock = jest.spyOn(
    CacheServiceClientJSON.prototype,
    'GetCacheEntryDownloadURL'
  )
  getCacheDownloadURLMock.mockReturnValue(
    Promise.resolve({
      ok: true,
      signedDownloadUrl,
      matchedKey: key
    })
  )

  const tempPath = '/foo/bar'

  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
  createTempDirectoryMock.mockImplementation(async () => {
    return Promise.resolve(tempPath)
  })

  const archivePath = path.join(tempPath, CacheFilename.Zstd)
  const downloadCacheFileMock = jest.spyOn(
    downloadCacheModule,
    'downloadCacheFile'
  )
  downloadCacheFileMock.mockReturnValue(
    Promise.resolve({
      _response: {
        status: 200
      }
    } as BlobDownloadResponseParsed)
  )

  const fileSize = 62915000
  const getArchiveFileSizeInBytesMock = jest
    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
    .mockReturnValue(fileSize)

  const extractTarMock = jest.spyOn(tar, 'extractTar')
  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')

  const cacheKey = await restoreCache(paths, key)

  expect(cacheKey).toBe(key)
  expect(getCacheVersionMock).toHaveBeenCalledWith(
    paths,
    compressionMethod,
    false
  )
  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
    key,
    restoreKeys: [],
    version: cacheVersion
  })
  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
  expect(downloadCacheFileMock).toHaveBeenCalledWith(
    signedDownloadUrl,
    archivePath
  )
  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)

  expect(extractTarMock).toHaveBeenCalledTimes(1)
  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)

  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)

  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})

test('restore with cache found for restore key', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const restoreKeys = ['node-']
  const compressionMethod = CompressionMethod.Gzip
  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
  const cacheVersion =
    'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'

  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
  getCacheVersionMock.mockReturnValue(cacheVersion)

  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))

  const getCacheDownloadURLMock = jest.spyOn(
    CacheServiceClientJSON.prototype,
    'GetCacheEntryDownloadURL'
  )
  getCacheDownloadURLMock.mockReturnValue(
    Promise.resolve({
      ok: true,
      signedDownloadUrl,
      matchedKey: restoreKeys[0]
    })
  )

  const tempPath = '/foo/bar'

  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
  createTempDirectoryMock.mockImplementation(async () => {
    return Promise.resolve(tempPath)
  })

  const archivePath = path.join(tempPath, CacheFilename.Gzip)
  const downloadCacheFileMock = jest.spyOn(
    downloadCacheModule,
    'downloadCacheFile'
  )
  downloadCacheFileMock.mockReturnValue(
    Promise.resolve({
      _response: {
        status: 200
      }
    } as BlobDownloadResponseParsed)
  )

  const fileSize = 142
  const getArchiveFileSizeInBytesMock = jest
    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
    .mockReturnValue(fileSize)

  const extractTarMock = jest.spyOn(tar, 'extractTar')
  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')

  const cacheKey = await restoreCache(paths, key, restoreKeys)

  expect(cacheKey).toBe(restoreKeys[0])
  expect(getCacheVersionMock).toHaveBeenCalledWith(
    paths,
    compressionMethod,
    false
  )
  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
    key,
    restoreKeys,
    version: cacheVersion
  })
  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
  expect(downloadCacheFileMock).toHaveBeenCalledWith(
    signedDownloadUrl,
    archivePath
  )
  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)

  expect(extractTarMock).toHaveBeenCalledTimes(1)
  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)

  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)

  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})

test('restore with dry run', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const options = {lookupOnly: true}
  const compressionMethod = CompressionMethod.Gzip
  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
  const cacheVersion =
    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'

  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
  getCacheVersionMock.mockReturnValue(cacheVersion)

  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))

  const getCacheDownloadURLMock = jest.spyOn(
    CacheServiceClientJSON.prototype,
    'GetCacheEntryDownloadURL'
  )
  getCacheDownloadURLMock.mockReturnValue(
    Promise.resolve({
      ok: true,
      signedDownloadUrl,
      matchedKey: key
    })
  )

  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
  const downloadCacheFileMock = jest.spyOn(
    downloadCacheModule,
    'downloadCacheFile'
  )

  const cacheKey = await restoreCache(paths, key, undefined, options)

  expect(cacheKey).toBe(key)
  expect(getCacheVersionMock).toHaveBeenCalledWith(
    paths,
    compressionMethod,
    false
  )
  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
    key,
    restoreKeys: [],
    version: cacheVersion
  })
  expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')

  // creating a tempDir and downloading the cache are skipped
  expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
  expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
})
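For orientation before the implementation hunks below, a minimal sketch of the call shapes these tests exercise. The paths, keys, restore keys, and the lookupOnly option come straight from the tests above; the surrounding function is illustrative only.

import {restoreCache} from '@actions/cache'

async function restoreNodeModules(): Promise<void> {
  // Exact key first, then prefix fallbacks. Validation rules from the tests:
  // at most 10 restore keys, no commas, keys no larger than 512 characters.
  const hit = await restoreCache(['node_modules'], 'node-test', ['node-'])
  if (hit === undefined) {
    return // cache miss (or a suppressed restore failure): continue cold
  }

  // lookupOnly reports whether a matching entry exists without downloading.
  const found = await restoreCache(['node_modules'], 'node-test', undefined, {
    lookupOnly: true
  })
  console.log(`cache entry exists: ${found !== undefined}`)
}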
@@ -79,9 +79,11 @@ export async function restoreCache(
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ): Promise<string | undefined> {
+  const cacheServiceVersion: string = getCacheServiceVersion()
+  core.debug(`Cache service version: ${cacheServiceVersion}`)
+
   checkPaths(paths)
 
-  const cacheServiceVersion: string = getCacheServiceVersion()
   switch (cacheServiceVersion) {
     case 'v2':
       return await restoreCacheV2(
@@ -259,7 +261,7 @@ async function restoreCacheV2(
 
     if (options?.lookupOnly) {
       core.info('Lookup only - skipping download')
-      return request.key
+      return response.matchedKey
     }
 
     archivePath = path.join(
@@ -269,7 +271,11 @@ async function restoreCacheV2(
     core.debug(`Archive path: ${archivePath}`)
     core.debug(`Starting download of archive to: ${archivePath}`)
 
-    await downloadCacheFile(response.signedDownloadUrl, archivePath)
+    const downloadResponse = await downloadCacheFile(
+      response.signedDownloadUrl,
+      archivePath
+    )
+    core.debug(`Download response status: ${downloadResponse._response.status}`)
 
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -285,9 +291,15 @@ async function restoreCacheV2(
     await extractTar(archivePath, compressionMethod)
     core.info('Cache restored successfully')
 
-    return request.key
+    return response.matchedKey
   } catch (error) {
-    throw new Error(`Failed to restore: ${error.message}`)
+    const typedError = error as Error
+    if (typedError.name === ValidationError.name) {
+      throw error
+    } else {
+      // Suppress all non-validation cache related errors because caching should be optional
+      core.warning(`Failed to restore: ${(error as Error).message}`)
+    }
   } finally {
     try {
       if (archivePath) {
@@ -297,6 +309,8 @@ async function restoreCacheV2(
       core.debug(`Failed to delete archive: ${error}`)
     }
   }
+
+  return undefined
 }
 
 /**
@@ -525,7 +539,13 @@ async function saveCacheV2(
     cacheId = parseInt(finalizeResponse.entryId)
   } catch (error) {
     const typedError = error as Error
-    core.warning(`Failed to save: ${typedError.message}`)
+    if (typedError.name === ValidationError.name) {
+      throw error
+    } else if (typedError.name === ReserveCacheError.name) {
+      core.info(`Failed to save: ${typedError.message}`)
+    } else {
+      core.warning(`Failed to save: ${typedError.message}`)
+    }
   } finally {
     // Try to delete the archive to save space
     try {
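The two catch blocks above define the consumer-visible error contract: validation problems are rethrown, a failed cache reservation is only informational on save, and every other failure is logged and swallowed so caching stays optional. A hedged sketch of what that means for a caller (behavior inferred from this diff and the tests above):

import {restoreCache} from '@actions/cache'

async function tryRestore(): Promise<string | undefined> {
  try {
    // Service or network errors resolve to undefined after a
    // core.warning('Failed to restore: ...'); they do not reject.
    return await restoreCache(['node_modules'], 'node-test')
  } catch (e) {
    // Only input validation errors (bad paths or keys) land here.
    console.log(`invalid cache inputs: ${(e as Error).message}`)
    return undefined
  }
}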
@@ -12,7 +12,7 @@ import type { PartialMessage } from "@protobuf-ts/runtime";
 import { reflectionMergePartial } from "@protobuf-ts/runtime";
 import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
 import { MessageType } from "@protobuf-ts/runtime";
-import { Timestamp } from "../../../google/protobuf/timestamp";
+import { CacheEntry } from "../../entities/v1/cacheentry";
 import { CacheMetadata } from "../../entities/v1/cachemetadata";
 /**
  * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
@@ -139,6 +139,12 @@ export interface GetCacheEntryDownloadURLResponse {
      * @generated from protobuf field: string signed_download_url = 2;
      */
     signedDownloadUrl: string;
+    /**
+     * Key or restore key that matches the lookup
+     *
+     * @generated from protobuf field: string matched_key = 3;
+     */
+    matchedKey: string;
 }
 /**
  * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
@@ -200,62 +206,11 @@ export interface ListCacheEntriesRequest {
  */
 export interface ListCacheEntriesResponse {
     /**
-     * @generated from protobuf field: repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1;
-     */
-    entries: ListCacheEntriesResponse_CacheEntry[];
-}
-/**
- * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-export interface ListCacheEntriesResponse_CacheEntry {
-    /**
-     * An explicit key for a cache entry
-     *
-     * @generated from protobuf field: string key = 1;
-     */
-    key: string;
-    /**
-     * SHA256 hex digest of the cache archive
-     *
-     * @generated from protobuf field: string hash = 2;
-     */
-    hash: string;
-    /**
-     * Cache entry size in bytes
-     *
-     * @generated from protobuf field: int64 size_bytes = 3;
-     */
-    sizeBytes: string;
-    /**
-     * Access scope
-     *
-     * @generated from protobuf field: string scope = 4;
-     */
-    scope: string;
-    /**
-     * Version SHA256 hex digest
-     *
-     * @generated from protobuf field: string version = 5;
-     */
-    version: string;
-    /**
-     * When the cache entry was created
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
-     */
-    createdAt?: Timestamp;
-    /**
-     * When the cache entry was last accessed
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
-     */
-    lastAccessedAt?: Timestamp;
-    /**
-     * When the cache entry is set to expire
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
-     */
-    expiresAt?: Timestamp;
+     * Cache entries in the defined scope
+     *
+     * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1;
+     */
+    entries: CacheEntry[];
 }
 /**
  * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
@@ -296,61 +251,12 @@ export interface LookupCacheEntryResponse {
      * @generated from protobuf field: bool exists = 1;
      */
     exists: boolean;
-}
-/**
- * Matched cache entry metadata
- *
- * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-export interface LookupCacheEntryResponse_CacheEntry {
-    /**
-     * An explicit key for a cache entry
-     *
-     * @generated from protobuf field: string key = 1;
-     */
-    key: string;
-    /**
-     * SHA256 hex digest of the cache archive
-     *
-     * @generated from protobuf field: string hash = 2;
-     */
-    hash: string;
-    /**
-     * Cache entry size in bytes
-     *
-     * @generated from protobuf field: int64 size_bytes = 3;
-     */
-    sizeBytes: string;
-    /**
-     * Access scope
-     *
-     * @generated from protobuf field: string scope = 4;
-     */
-    scope: string;
-    /**
-     * Version SHA256 hex digest
-     *
-     * @generated from protobuf field: string version = 5;
-     */
-    version: string;
-    /**
-     * When the cache entry was created
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
-     */
-    createdAt?: Timestamp;
-    /**
-     * When the cache entry was last accessed
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
-     */
-    lastAccessedAt?: Timestamp;
-    /**
-     * When the cache entry is set to expire
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
-     */
-    expiresAt?: Timestamp;
+    /**
+     * Matched cache entry metadata
+     *
+     * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2;
+     */
+    entry?: CacheEntry;
 }
 // @generated message type with reflection information, may provide speed optimized methods
 class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
@@ -662,11 +568,12 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDow
     constructor() {
         super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
             { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse {
-        const message = { ok: false, signedDownloadUrl: "" };
+        const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
         globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             reflectionMergePartial<GetCacheEntryDownloadURLResponse>(this, message, value);
@@ -683,6 +590,9 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDow
                 case /* string signed_download_url */ 2:
                     message.signedDownloadUrl = reader.string();
                     break;
+                case /* string matched_key */ 3:
+                    message.matchedKey = reader.string();
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -701,6 +611,9 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDow
         /* string signed_download_url = 2; */
         if (message.signedDownloadUrl !== "")
             writer.tag(2, WireType.LengthDelimited).string(message.signedDownloadUrl);
+        /* string matched_key = 3; */
+        if (message.matchedKey !== "")
+            writer.tag(3, WireType.LengthDelimited).string(message.matchedKey);
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
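A hedged round-trip sketch for the new matched_key field, using the create/toBinary/fromBinary helpers that every protobuf-ts MessageType provides; the import path assumes this package's generated tree:

import {GetCacheEntryDownloadURLResponse} from '../src/generated/results/api/v1/cache.twirp'

// Field 3 is omitted on the wire when empty (proto3 default), and
// create() restores the "" default on the reading side.
const msg = GetCacheEntryDownloadURLResponse.create({
  ok: true,
  signedDownloadUrl: 'https://blob-storage.local?signed=true',
  matchedKey: 'node-'
})
const decoded = GetCacheEntryDownloadURLResponse.fromBinary(
  GetCacheEntryDownloadURLResponse.toBinary(msg)
)
console.log(decoded.matchedKey) // 'node-'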
@@ -884,7 +797,7 @@ export const ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
 class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
     constructor() {
         super("github.actions.results.api.v1.ListCacheEntriesResponse", [
-            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListCacheEntriesResponse_CacheEntry }
+            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheEntry }
         ]);
     }
     create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse {
@@ -899,8 +812,8 @@ class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse
         while (reader.pos < end) {
             let [fieldNo, wireType] = reader.tag();
             switch (fieldNo) {
-                case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1:
-                    message.entries.push(ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
+                case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
+                    message.entries.push(CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
                     break;
                 default:
                     let u = options.readUnknownField;
@@ -914,9 +827,9 @@ class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse
         return message;
     }
     internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
-        /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1; */
+        /* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
         for (let i = 0; i < message.entries.length; i++)
-            ListCacheEntriesResponse_CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
+            CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -928,102 +841,6 @@ class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse
  */
 export const ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
 // @generated message type with reflection information, may provide speed optimized methods
-class ListCacheEntriesResponse_CacheEntry$Type extends MessageType<ListCacheEntriesResponse_CacheEntry> {
-    constructor() {
-        super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
-        ]);
-    }
-    create(value?: PartialMessage<ListCacheEntriesResponse_CacheEntry>): ListCacheEntriesResponse_CacheEntry {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            reflectionMergePartial<ListCacheEntriesResponse_CacheEntry>(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse_CacheEntry): ListCacheEntriesResponse_CacheEntry {
-        let message = target ?? this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message: ListCacheEntriesResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
-// @generated message type with reflection information, may provide speed optimized methods
 class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryRequest", [
@@ -1095,7 +912,8 @@ export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
 class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryResponse", [
-            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry", kind: "message", T: () => CacheEntry }
         ]);
     }
     create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse {
@@ -1113,6 +931,9 @@ class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse
                 case /* bool exists */ 1:
                     message.exists = reader.bool();
                     break;
+                case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
+                    message.entry = CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -1128,6 +949,9 @@ class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse
         /* bool exists = 1; */
         if (message.exists !== false)
             writer.tag(1, WireType.Varint).bool(message.exists);
+        /* github.actions.results.entities.v1.CacheEntry entry = 2; */
+        if (message.entry)
+            CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -1138,102 +962,6 @@ class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse
  * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
  */
 export const LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
-// @generated message type with reflection information, may provide speed optimized methods
-class LookupCacheEntryResponse_CacheEntry$Type extends MessageType<LookupCacheEntryResponse_CacheEntry> {
-    constructor() {
-        super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
-        ]);
-    }
-    create(value?: PartialMessage<LookupCacheEntryResponse_CacheEntry>): LookupCacheEntryResponse_CacheEntry {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            reflectionMergePartial<LookupCacheEntryResponse_CacheEntry>(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse_CacheEntry): LookupCacheEntryResponse_CacheEntry {
-        let message = target ?? this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message: LookupCacheEntryResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-export const LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
 /**
  * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
  */
@@ -0,0 +1,163 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cacheentry.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
 * @generated from protobuf message github.actions.results.entities.v1.CacheEntry
 */
export interface CacheEntry {
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 1;
     */
    key: string;
    /**
     * SHA256 hex digest of the cache archive
     *
     * @generated from protobuf field: string hash = 2;
     */
    hash: string;
    /**
     * Cache entry size in bytes
     *
     * @generated from protobuf field: int64 size_bytes = 3;
     */
    sizeBytes: string;
    /**
     * Access scope
     *
     * @generated from protobuf field: string scope = 4;
     */
    scope: string;
    /**
     * Version SHA256 hex digest
     *
     * @generated from protobuf field: string version = 5;
     */
    version: string;
    /**
     * When the cache entry was created
     *
     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
     */
    createdAt?: Timestamp;
    /**
     * When the cache entry was last accessed
     *
     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
     */
    lastAccessedAt?: Timestamp;
    /**
     * When the cache entry is set to expire
     *
     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
     */
    expiresAt?: Timestamp;
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheEntry$Type extends MessageType<CacheEntry> {
    constructor() {
        super("github.actions.results.entities.v1.CacheEntry", [
            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
        ]);
    }
    create(value?: PartialMessage<CacheEntry>): CacheEntry {
        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<CacheEntry>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string key */ 1:
                    message.key = reader.string();
                    break;
                case /* string hash */ 2:
                    message.hash = reader.string();
                    break;
                case /* int64 size_bytes */ 3:
                    message.sizeBytes = reader.int64().toString();
                    break;
                case /* string scope */ 4:
                    message.scope = reader.string();
                    break;
                case /* string version */ 5:
                    message.version = reader.string();
                    break;
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                case /* google.protobuf.Timestamp last_accessed_at */ 7:
                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
                    break;
                case /* google.protobuf.Timestamp expires_at */ 8:
                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string key = 1; */
        if (message.key !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.key);
        /* string hash = 2; */
        if (message.hash !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.hash);
        /* int64 size_bytes = 3; */
        if (message.sizeBytes !== "0")
            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
        /* string scope = 4; */
        if (message.scope !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.scope);
        /* string version = 5; */
        if (message.version !== "")
            writer.tag(5, WireType.LengthDelimited).string(message.version);
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Timestamp last_accessed_at = 7; */
        if (message.lastAccessedAt)
            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Timestamp expires_at = 8; */
        if (message.expiresAt)
            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
 */
export const CacheEntry = new CacheEntry$Type();
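A small usage sketch for the shared entity above. Note that int64 size_bytes surfaces as a string because the file was generated with long_type_string, and Timestamp.now() is a convenience method protobuf-ts adds to the generated Timestamp type; import paths assume the package's generated tree:

import {CacheEntry} from '../src/generated/results/entities/v1/cacheentry'
import {Timestamp} from '../src/generated/google/protobuf/timestamp'

const entry = CacheEntry.create({
  key: 'node-test',
  sizeBytes: '62915000', // int64 mapped to string by long_type_string
  createdAt: Timestamp.now()
})
// Round-trip through the binary format defined by internalBinaryRead/Write.
const decoded = CacheEntry.fromBinary(CacheEntry.toBinary(entry))
console.log(decoded.key, decoded.sizeBytes)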
@@ -3,13 +3,14 @@ import * as core from '@actions/core'
 import {
   BlobClient,
   BlockBlobClient,
-  BlobDownloadOptions
+  BlobDownloadOptions,
+  BlobDownloadResponseParsed
 } from '@azure/storage-blob'
 
 export async function downloadCacheFile(
   signedUploadURL: string,
   archivePath: string
-): Promise<{}> {
+): Promise<BlobDownloadResponseParsed> {
   const downloadOptions: BlobDownloadOptions = {
     maxRetryRequests: 5
   }
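Since downloadCacheFile now returns BlobDownloadResponseParsed instead of {}, callers can inspect the HTTP status in a type-safe way, which is exactly what the new debug line in restoreCacheV2 reads. A minimal sketch (the status check beyond logging is an illustrative addition, not part of this commit):

import {downloadCacheFile} from '../src/internal/blob/download-cache'

async function fetchArchive(url: string, dest: string): Promise<void> {
  const res = await downloadCacheFile(url, dest)
  // _response is the parsed HTTP response from @azure/storage-blob.
  if (res._response.status !== 200) {
    throw new Error(`unexpected download status: ${res._response.status}`)
  }
}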