diff --git a/.github/workflows/cache-tests.yml b/.github/workflows/cache-tests.yml
index aa382ffc..e63ac90c 100644
--- a/.github/workflows/cache-tests.yml
+++ b/.github/workflows/cache-tests.yml
@@ -1,5 +1,13 @@
name: cache-unit-tests
-on: push
+on:
+ push:
+ branches:
+ - master
+ paths-ignore:
+ - '**.md'
+ pull_request:
+ paths-ignore:
+ - '**.md'
jobs:
build:
@@ -21,7 +29,7 @@ jobs:
with:
node-version: 12.x
- # In order to save & restore cache artifacts from a shell script, certain env variables need to be set that are only available in the
+ # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
# node context. This runs a local action that gets and sets the necessary env variables that are needed
- name: Set env variables
uses: ./packages/cache/__tests__/__fixtures__/
diff --git a/README.md b/README.md
index 4ffb0d44..6fd01f8c 100644
--- a/README.md
+++ b/README.md
@@ -89,7 +89,7 @@ $ npm install @actions/artifact --save
Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)
```bash
-$ npm install @actions/cache --save
+$ npm install @actions/cache
```
diff --git a/packages/cache/__tests__/cacheHttpClient.test.ts b/packages/cache/__tests__/cacheHttpClient.test.ts
index e5d7eacf..a7f3fec1 100644
--- a/packages/cache/__tests__/cacheHttpClient.test.ts
+++ b/packages/cache/__tests__/cacheHttpClient.test.ts
@@ -1,4 +1,4 @@
-import {getCacheVersion} from '../src/internal/cacheHttpClient'
+import {getCacheVersion, retry} from '../src/internal/cacheHttpClient'
import {CompressionMethod} from '../src/internal/constants'
test('getCacheVersion with one path returns version', async () => {
@@ -34,3 +34,142 @@ test('getCacheVersion with gzip compression does not change vesion', async () =>
'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
)
})
+
+interface TestResponse {
+ statusCode: number
+ result: string | null
+}
+
+async function handleResponse(
+ response: TestResponse | undefined
+): Promise<TestResponse> {
+ if (!response) {
+ // eslint-disable-next-line no-undef
+ fail('Retry method called too many times')
+ }
+
+ if (response.statusCode === 999) {
+ throw Error('Test Error')
+ } else {
+ return Promise.resolve(response)
+ }
+}
+
+async function testRetryExpectingResult(
+ responses: TestResponse[],
+ expectedResult: string | null
+): Promise<void> {
+ responses = responses.reverse() // Reverse responses since we pop from end
+
+ const actualResult = await retry(
+ 'test',
+ async () => handleResponse(responses.pop()),
+ (response: TestResponse) => response.statusCode
+ )
+
+ expect(actualResult.result).toEqual(expectedResult)
+}
+
+async function testRetryExpectingError(
+ responses: TestResponse[]
+): Promise<void> {
+ responses = responses.reverse() // Reverse responses since we pop from end
+
+  await expect(
+ retry(
+ 'test',
+ async () => handleResponse(responses.pop()),
+ (response: TestResponse) => response.statusCode
+ )
+ ).rejects.toBeInstanceOf(Error)
+}
+
+test('retry works on successful response', async () => {
+ await testRetryExpectingResult(
+ [
+ {
+ statusCode: 200,
+ result: 'Ok'
+ }
+ ],
+ 'Ok'
+ )
+})
+
+test('retry works after retryable status code', async () => {
+ await testRetryExpectingResult(
+ [
+ {
+ statusCode: 503,
+ result: null
+ },
+ {
+ statusCode: 200,
+ result: 'Ok'
+ }
+ ],
+ 'Ok'
+ )
+})
+
+test('retry fails after exhausting retries', async () => {
+ await testRetryExpectingError([
+ {
+ statusCode: 503,
+ result: null
+ },
+ {
+ statusCode: 503,
+ result: null
+ },
+ {
+ statusCode: 200,
+ result: 'Ok'
+ }
+ ])
+})
+
+test('retry fails after non-retryable status code', async () => {
+ await testRetryExpectingError([
+ {
+ statusCode: 500,
+ result: null
+ },
+ {
+ statusCode: 200,
+ result: 'Ok'
+ }
+ ])
+})
+
+test('retry works after error', async () => {
+ await testRetryExpectingResult(
+ [
+ {
+ statusCode: 999,
+ result: null
+ },
+ {
+ statusCode: 200,
+ result: 'Ok'
+ }
+ ],
+ 'Ok'
+ )
+})
+
+test('retry returns after client error', async () => {
+ await testRetryExpectingResult(
+ [
+ {
+ statusCode: 400,
+ result: null
+ },
+ {
+ statusCode: 200,
+ result: 'Ok'
+ }
+ ],
+ null
+ )
+})
diff --git a/packages/cache/__tests__/saveCache.test.ts b/packages/cache/__tests__/saveCache.test.ts
index 2fc379b3..720d2ad6 100644
--- a/packages/cache/__tests__/saveCache.test.ts
+++ b/packages/cache/__tests__/saveCache.test.ts
@@ -135,7 +135,7 @@ test('save with server error should fail', async () => {
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
- expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+ expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
@@ -176,6 +176,6 @@ test('save with valid inputs uploads a cache', async () => {
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
- expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+ expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index cf9be5eb..39eaf6de 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -3,6 +3,7 @@ import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar} from './internal/tar'
+import {UploadOptions} from './options'
function checkPaths(paths: string[]): void {
if (!paths || paths.length === 0) {
@@ -102,9 +103,14 @@ export async function restoreCache(
*
* @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
+ * @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully
*/
-export async function saveCache(paths: string[], key: string): Promise<number> {
+export async function saveCache(
+ paths: string[],
+ key: string,
+ options?: UploadOptions
+): Promise<number> {
checkPaths(paths)
checkKey(key)
@@ -147,7 +153,7 @@ export async function saveCache(paths: string[], key: string): Promise<number> {
}
core.debug(`Saving Cache (ID: ${cacheId})`)
- await cacheHttpClient.saveCache(cacheId, archivePath)
+ await cacheHttpClient.saveCache(cacheId, archivePath, options)
return cacheId
}
diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts
index f4f8c4d1..a3b9633a 100644
--- a/packages/cache/src/internal/cacheHttpClient.ts
+++ b/packages/cache/src/internal/cacheHttpClient.ts
@@ -20,6 +20,7 @@ import {
ReserveCacheRequest,
ReserveCacheResponse
} from './contracts'
+import {UploadOptions} from '../options'
const versionSalt = '1.0'
@@ -30,6 +31,13 @@ function isSuccessStatusCode(statusCode?: number): boolean {
return statusCode >= 200 && statusCode < 300
}
+function isServerErrorStatusCode(statusCode?: number): boolean {
+ if (!statusCode) {
+ return true
+ }
+ return statusCode >= 500
+}
+
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false
@@ -100,6 +108,75 @@ export function getCacheVersion(
.digest('hex')
}
+export async function retry<T>(
+  name: string,
+  method: () => Promise<T>,
+  getStatusCode: (arg0: T) => number | undefined,
+  maxAttempts = 2
+): Promise<T> {
+ let response: T | undefined = undefined
+ let statusCode: number | undefined = undefined
+ let isRetryable = false
+ let errorMessage = ''
+ let attempt = 1
+
+ while (attempt <= maxAttempts) {
+ try {
+ response = await method()
+ statusCode = getStatusCode(response)
+
+ if (!isServerErrorStatusCode(statusCode)) {
+ return response
+ }
+
+ isRetryable = isRetryableStatusCode(statusCode)
+ errorMessage = `Cache service responded with ${statusCode}`
+ } catch (error) {
+ isRetryable = true
+ errorMessage = error.message
+ }
+
+ core.debug(
+ `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
+ )
+
+ if (!isRetryable) {
+ core.debug(`${name} - Error is not retryable`)
+ break
+ }
+
+ attempt++
+ }
+
+ throw Error(`${name} failed: ${errorMessage}`)
+}
+
+export async function retryTypedResponse<T>(
+  name: string,
+  method: () => Promise<ITypedResponse<T>>,
+  maxAttempts = 2
+): Promise<ITypedResponse<T>> {
+ return await retry(
+ name,
+ method,
+    (response: ITypedResponse<T>) => response.statusCode,
+ maxAttempts
+ )
+}
+
+export async function retryHttpClientResponse<T>(
+  name: string,
+  method: () => Promise<IHttpClientResponse>,
+  maxAttempts = 2
+): Promise<IHttpClientResponse> {
+ return await retry(
+ name,
+ method,
+ (response: IHttpClientResponse) => response.message.statusCode,
+ maxAttempts
+ )
+}
+
export async function getCacheEntry(
keys: string[],
paths: string[],
@@ -111,8 +188,8 @@ export async function getCacheEntry(
keys.join(',')
)}&version=${version}`
-  const response = await httpClient.getJson<ArtifactCacheEntry>(
-    getCacheApiUrl(resource)
+  const response = await retryTypedResponse('getCacheEntry', async () =>
+    httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
)
if (response.statusCode === 204) {
return null
@@ -145,9 +222,12 @@ export async function downloadCache(
archiveLocation: string,
archivePath: string
): Promise {
- const writableStream = fs.createWriteStream(archivePath)
+ const writeStream = fs.createWriteStream(archivePath)
const httpClient = new HttpClient('actions/cache')
- const downloadResponse = await httpClient.get(archiveLocation)
+ const downloadResponse = await retryHttpClientResponse(
+ 'downloadCache',
+ async () => httpClient.get(archiveLocation)
+ )
// Abort download if no traffic received over the socket.
downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
@@ -155,7 +235,7 @@ export async function downloadCache(
core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`)
})
- await pipeResponseToStream(downloadResponse, writableStream)
+ await pipeResponseToStream(downloadResponse, writeStream)
// Validate download size.
const contentLengthHeader = downloadResponse.message.headers['content-length']
@@ -187,9 +267,11 @@ export async function reserveCache(
key,
version
}
-  const response = await httpClient.postJson<ReserveCacheResponse>(
-    getCacheApiUrl('caches'),
-    reserveCacheRequest
+  const response = await retryTypedResponse('reserveCache', async () =>
+    httpClient.postJson<ReserveCacheResponse>(
+ getCacheApiUrl('caches'),
+ reserveCacheRequest
+ )
)
return response?.result?.cacheId ?? -1
}
@@ -206,7 +288,7 @@ function getContentRange(start: number, end: number): string {
async function uploadChunk(
httpClient: HttpClient,
resourceUrl: string,
- data: NodeJS.ReadableStream,
+ openStream: () => NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
@@ -223,56 +305,31 @@ async function uploadChunk(
'Content-Range': getContentRange(start, end)
}
-  const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
- return await httpClient.sendStream(
- 'PATCH',
- resourceUrl,
- data,
- additionalHeaders
- )
- }
-
- const response = await uploadChunkRequest()
- if (isSuccessStatusCode(response.message.statusCode)) {
- return
- }
-
- if (isRetryableStatusCode(response.message.statusCode)) {
- core.debug(
- `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
- )
- const retryResponse = await uploadChunkRequest()
- if (isSuccessStatusCode(retryResponse.message.statusCode)) {
- return
- }
- }
-
- throw new Error(
- `Cache service responded with ${response.message.statusCode} during chunk upload.`
+ await retryHttpClientResponse(
+ `uploadChunk (start: ${start}, end: ${end})`,
+ async () =>
+ httpClient.sendStream(
+ 'PATCH',
+ resourceUrl,
+ openStream(),
+ additionalHeaders
+ )
)
}
-function parseEnvNumber(key: string): number | undefined {
- const value = Number(process.env[key])
- if (Number.isNaN(value) || value < 0) {
- return undefined
- }
- return value
-}
-
async function uploadFile(
httpClient: HttpClient,
cacheId: number,
- archivePath: string
+ archivePath: string,
+ options?: UploadOptions
): Promise {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
const fd = fs.openSync(archivePath, 'r')
- const concurrency = parseEnvNumber('CACHE_UPLOAD_CONCURRENCY') ?? 4 // # of HTTP requests in parallel
- const MAX_CHUNK_SIZE =
- parseEnvNumber('CACHE_UPLOAD_CHUNK_SIZE') ?? 32 * 1024 * 1024 // 32 MB Chunks
+ const concurrency = options?.uploadConcurrency ?? 4 // # of HTTP requests in parallel
+ const MAX_CHUNK_SIZE = options?.uploadChunkSize ?? 32 * 1024 * 1024 // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`)
const parallelUploads = [...new Array(concurrency).keys()]
@@ -287,14 +344,26 @@ async function uploadFile(
const start = offset
const end = offset + chunkSize - 1
offset += MAX_CHUNK_SIZE
- const chunk = fs.createReadStream(archivePath, {
- fd,
- start,
- end,
- autoClose: false
- })
- await uploadChunk(httpClient, resourceUrl, chunk, start, end)
+ await uploadChunk(
+ httpClient,
+ resourceUrl,
+ () =>
+ fs
+ .createReadStream(archivePath, {
+ fd,
+ start,
+ end,
+ autoClose: false
+ })
+ .on('error', error => {
+ throw new Error(
+              `Cache upload failed because file read failed with ${error.message}`
+ )
+ }),
+ start,
+ end
+ )
}
})
)
@@ -310,20 +379,23 @@ async function commitCache(
filesize: number
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = {size: filesize}
-  return await httpClient.postJson<null>(
-    getCacheApiUrl(`caches/${cacheId.toString()}`),
-    commitCacheRequest
+  return await retryTypedResponse('commitCache', async () =>
+    httpClient.postJson<null>(
+ getCacheApiUrl(`caches/${cacheId.toString()}`),
+ commitCacheRequest
+ )
)
}
export async function saveCache(
cacheId: number,
- archivePath: string
+ archivePath: string,
+ options?: UploadOptions
): Promise<void> {
const httpClient = createHttpClient()
core.debug('Upload cache')
- await uploadFile(httpClient, cacheId, archivePath)
+ await uploadFile(httpClient, cacheId, archivePath, options)
// Commit Cache
core.debug('Commiting cache')
diff --git a/packages/cache/src/options.ts b/packages/cache/src/options.ts
new file mode 100644
index 00000000..c5ecdad5
--- /dev/null
+++ b/packages/cache/src/options.ts
@@ -0,0 +1,17 @@
+/**
+ * Options to control cache upload
+ */
+export interface UploadOptions {
+ /**
+ * Number of parallel cache upload
+ *
+ * @default 4
+ */
+ uploadConcurrency?: number
+ /**
+ * Maximum chunk size for cache upload
+ *
+ * @default 32MB
+ */
+ uploadChunkSize?: number
+}