
Cleanups and package refactoring

pull/1857/head
Bassem Dghaidi 2024-10-24 05:19:48 -07:00 committed by GitHub
parent 89354f6540
commit 28dbd8ff93
7 changed files with 447 additions and 20 deletions


@@ -0,0 +1,346 @@
import * as core from '@actions/core'
import * as path from 'path'
import { restoreCache } from '../src/cache'
import { CacheServiceClientJSON } from '../src/generated/results/api/v1/cache.twirp'
import * as cacheUtils from '../src/internal/cacheUtils'
import * as config from '../src/internal/config'
import { CacheFilename, CompressionMethod } from '../src/internal/constants'
import * as util from '@actions/artifact/lib/internal/shared/util'
import { ArtifactCacheEntry } from '../src/internal/contracts'
import * as tar from '../src/internal/tar'
jest.mock('../src/internal/cacheTwirpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/tar')
const fixtures = {
testRuntimeToken: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8',
backendIds: {
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
},
cacheServiceURL: 'http://results.local',
}
beforeAll(() => {
jest.spyOn(console, 'log').mockImplementation(() => { })
jest.spyOn(core, 'debug').mockImplementation(() => { })
jest.spyOn(core, 'info').mockImplementation(() => { })
jest.spyOn(core, 'warning').mockImplementation(() => { })
jest.spyOn(core, 'error').mockImplementation(() => { })
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
const actualUtils = jest.requireActual('../src/internal/cacheUtils')
return actualUtils.getCacheFileName(cm)
})
jest.spyOn(config, 'getCacheServiceVersion').mockImplementation(() => {
return "v2"
})
jest.spyOn(config, 'getRuntimeToken').mockImplementation(() => {
return fixtures.testRuntimeToken
})
jest.spyOn(util, 'getBackendIdsFromToken').mockImplementation(() => {
return fixtures.backendIds
})
jest.spyOn(config, 'getCacheServiceURL').mockReturnValue(
fixtures.cacheServiceURL
)
})
test('restore with no path should fail', async () => {
const paths: string[] = []
const key = 'node-test'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('restore with too many keys should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = [...Array(20).keys()].map(x => x.toString())
await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
})
test('restore with large key should fail', async () => {
const paths = ['node_modules']
const key = 'foo'.repeat(512) // Over the 512 character limit
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
)
})
test('restore with invalid key should fail', async () => {
const paths = ['node_modules']
const key = 'comma,comma'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot contain commas.`
)
})
test('restore with no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
jest.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockReturnValue(
Promise.resolve({
ok: false,
signedDownloadUrl: ''
})
)
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
})
/**
test('restore with server error should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const logWarningMock = jest.spyOn(core, 'warning')
jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => {
throw new Error('HTTP Error Occurred')
})
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
expect(logWarningMock).toHaveBeenCalledTimes(1)
expect(logWarningMock).toHaveBeenCalledWith(
'Failed to restore: HTTP Error Occurred'
)
})
test('restore with restore keys and no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKey = 'node-'
jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(async () => {
return Promise.resolve(null)
})
const cacheKey = await restoreCache(paths, key, [restoreKey])
expect(cacheKey).toBe(undefined)
})
test('restore with gzip compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: 'refs/heads/main',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Gzip)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(key)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath,
undefined
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with zstd compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const infoMock = jest.spyOn(core, 'info')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: 'refs/heads/main',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Zstd)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 62915000
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(key)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath,
undefined
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with cache found for restore key', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKey = 'node-'
const infoMock = jest.spyOn(core, 'info')
const cacheEntry: ArtifactCacheEntry = {
cacheKey: restoreKey,
scope: 'refs/heads/main',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Zstd)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key, [restoreKey])
expect(cacheKey).toBe(restoreKey)
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], paths, {
compressionMethod: compression,
enableCrossOsArchive: false
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath,
undefined
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('restore with dry run', async () => {
const paths = ['node_modules']
const key = 'node-test'
const options = { lookupOnly: true }
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: 'refs/heads/main',
archiveLocation: 'www.actionscache.test/download'
}
const getCacheMock = jest.spyOn(cacheHttpClient, 'getCacheEntry')
getCacheMock.mockImplementation(async () => {
return Promise.resolve(cacheEntry)
})
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheKey = await restoreCache(paths, key, undefined, options)
expect(cacheKey).toBe(key)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(getCacheMock).toHaveBeenCalledWith([key], paths, {
compressionMethod: compression,
enableCrossOsArchive: false
})
// creating a tempDir and downloading the cache are skipped
expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
expect(downloadCacheMock).toHaveBeenCalledTimes(0)
})
**/


@@ -1,11 +1,11 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as config from './internal/config'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import * as cacheTwirpClient from './internal/cacheTwirpClient'
import { createTar, extractTar, listTar } from './internal/tar'
import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
import { DownloadOptions, UploadOptions } from './options'
import { createTar, extractTar, listTar } from './internal/tar'
import {
CreateCacheEntryRequest,
CreateCacheEntryResponse,
@@ -14,10 +14,10 @@ import {
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
} from './generated/results/api/v1/cache'
import { UploadCacheFile } from './internal/v2/upload-cache'
import { DownloadCacheFile } from './internal/v2/download-cache'
import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util'
import { CacheFileSizeLimit } from './internal/constants'
import { UploadCacheFile } from './internal/blob/upload-cache'
import { DownloadCacheFile } from './internal/blob/download-cache'
import { getBackendIdsFromToken, BackendIds } from '@actions/artifact/lib/internal/shared/util'
export class ValidationError extends Error {
constructor(message: string) {


@@ -1,10 +1,10 @@
import { info, debug } from '@actions/core'
import { getRuntimeToken, getCacheServiceURL } from './config'
import { getUserAgentString } from './user-agent'
import { NetworkError, UsageError } from './errors'
import { getRuntimeToken, getCacheServiceURL } from '../config'
import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
import { HttpClient, HttpClientResponse, HttpCodes } from '@actions/http-client'
import { CacheServiceClientJSON } from '../generated/results/api/v1/cache.twirp'
// import {getUserAgentString} from './user-agent'
// import {NetworkError, UsageError} from './errors'
import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp'
// The twirp http client must implement this interface
interface Rpc {
@@ -100,9 +100,9 @@ class CacheServiceClient implements Rpc {
isRetryable = this.isRetryableHttpStatusCode(statusCode)
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
if (body.msg) {
// if (UsageError.isUsageErrorMessage(body.msg)) {
// throw new UsageError()
// }
if (UsageError.isUsageErrorMessage(body.msg)) {
throw new UsageError()
}
errorMessage = `${errorMessage}: ${body.msg}`
}
@@ -111,13 +111,13 @@ class CacheServiceClient implements Rpc {
debug(`Raw Body: ${rawBody}`)
}
// if (error instanceof UsageError) {
// throw error
// }
if (error instanceof UsageError) {
throw error
}
// if (NetworkError.isNetworkErrorCode(error?.code)) {
// throw new NetworkError(error?.code)
// }
if (NetworkError.isNetworkErrorCode(error?.code)) {
throw new NetworkError(error?.code)
}
isRetryable = true
errorMessage = error.message
@@ -193,7 +193,7 @@ export function internalCacheTwirpClient(options?: {
retryMultiplier?: number
}): CacheServiceClientJSON {
const client = new CacheServiceClient(
'actions/cache',
getUserAgentString(),
options?.maxAttempts,
options?.retryIntervalMs,
options?.retryMultiplier

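The Rpc interface referenced at the top of this file is the contract the generated CacheServiceClientJSON calls through; the hunk is truncated before its body, so the following is only a sketch of the shape twirp-ts typically generates for this contract (the exact signature is an assumption, not confirmed by this diff):

// Assumed shape of the twirp-ts Rpc contract that CacheServiceClient implements.
interface Rpc {
  request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array>
}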

@@ -0,0 +1,72 @@
export class FilesNotFoundError extends Error {
files: string[]
constructor(files: string[] = []) {
let message = 'No files were found to upload'
if (files.length > 0) {
message += `: ${files.join(', ')}`
}
super(message)
this.files = files
this.name = 'FilesNotFoundError'
}
}
export class InvalidResponseError extends Error {
constructor(message: string) {
super(message)
this.name = 'InvalidResponseError'
}
}
export class CacheNotFoundError extends Error {
constructor(message = 'Cache not found') {
super(message)
this.name = 'CacheNotFoundError'
}
}
export class GHESNotSupportedError extends Error {
constructor(
message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.'
) {
super(message)
this.name = 'GHESNotSupportedError'
}
}
export class NetworkError extends Error {
code: string
constructor(code: string) {
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
super(message)
this.code = code
this.name = 'NetworkError'
}
static isNetworkErrorCode = (code?: string): boolean => {
if (!code) return false
return [
'ECONNRESET',
'ENOTFOUND',
'ETIMEDOUT',
'ECONNREFUSED',
'EHOSTUNREACH'
].includes(code)
}
}
export class UsageError extends Error {
constructor() {
const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
super(message)
this.name = 'UsageError'
}
static isUsageErrorMessage = (msg?: string): boolean => {
if (!msg) return false
return msg.includes('insufficient usage')
}
}
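These static helpers are exactly what the un-commented branches in the cacheTwirpClient hunks above switch on; a minimal sketch of that classification pattern follows (the classifyFailure helper, its argument shape, and the './errors' import path are illustrative, not part of this commit):

import { NetworkError, UsageError } from './errors'

// Illustrative helper: map a low-level failure onto the typed errors above,
// mirroring the retry loop in cacheTwirpClient. Not part of this commit.
function classifyFailure(err: { code?: string; msg?: string }): Error {
  if (UsageError.isUsageErrorMessage(err.msg)) {
    return new UsageError()
  }
  if (NetworkError.isNetworkErrorCode(err.code)) {
    return new NetworkError(err.code as string)
  }
  return new Error(err.msg ?? 'Unknown cache service error')
}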


@@ -0,0 +1,9 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')
/**
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
*/
export function getUserAgentString(): string {
return `@actions/cache-${packageJson.version}`
}