mirror of https://github.com/actions/toolkit

Add cache upload options and pull from latest actions/cache master

parent c534ad2cbd
commit 1413cd0e32
@@ -1,5 +1,13 @@
 name: cache-unit-tests
-on: push
+on:
+  push:
+    branches:
+      - master
+    paths-ignore:
+      - '**.md'
+  pull_request:
+    paths-ignore:
+      - '**.md'
 
 jobs:
   build:
@@ -21,7 +29,7 @@ jobs:
       with:
         node-version: 12.x
 
-    # In order to save & restore cache artifacts from a shell script, certain env variables need to be set that are only available in the
+    # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
     # node context. This runs a local action that gets and sets the necessary env variables that are needed
     - name: Set env variables
       uses: ./packages/cache/__tests__/__fixtures__/
@@ -89,7 +89,7 @@ $ npm install @actions/artifact --save
 Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)
 
 ```bash
-$ npm install @actions/cache --save
+$ npm install @actions/cache
 ```
 <br/>
 
@@ -1,4 +1,4 @@
-import {getCacheVersion} from '../src/internal/cacheHttpClient'
+import {getCacheVersion, retry} from '../src/internal/cacheHttpClient'
 import {CompressionMethod} from '../src/internal/constants'
 
 test('getCacheVersion with one path returns version', async () => {
@@ -34,3 +34,142 @@ test('getCacheVersion with gzip compression does not change vesion', async () =>
     'b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985'
   )
 })
+
+interface TestResponse {
+  statusCode: number
+  result: string | null
+}
+
+async function handleResponse(
+  response: TestResponse | undefined
+): Promise<TestResponse> {
+  if (!response) {
+    // eslint-disable-next-line no-undef
+    fail('Retry method called too many times')
+  }
+
+  if (response.statusCode === 999) {
+    throw Error('Test Error')
+  } else {
+    return Promise.resolve(response)
+  }
+}
+
+async function testRetryExpectingResult(
+  responses: TestResponse[],
+  expectedResult: string | null
+): Promise<void> {
+  responses = responses.reverse() // Reverse responses since we pop from end
+
+  const actualResult = await retry(
+    'test',
+    async () => handleResponse(responses.pop()),
+    (response: TestResponse) => response.statusCode
+  )
+
+  expect(actualResult.result).toEqual(expectedResult)
+}
+
+async function testRetryExpectingError(
+  responses: TestResponse[]
+): Promise<void> {
+  responses = responses.reverse() // Reverse responses since we pop from end
+
+  expect(
+    retry(
+      'test',
+      async () => handleResponse(responses.pop()),
+      (response: TestResponse) => response.statusCode
+    )
+  ).rejects.toBeInstanceOf(Error)
+}
+
+test('retry works on successful response', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    'Ok'
+  )
+})
+
+test('retry works after retryable status code', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 503,
+        result: null
+      },
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    'Ok'
+  )
+})
+
+test('retry fails after exhausting retries', async () => {
+  await testRetryExpectingError([
+    {
+      statusCode: 503,
+      result: null
+    },
+    {
+      statusCode: 503,
+      result: null
+    },
+    {
+      statusCode: 200,
+      result: 'Ok'
+    }
+  ])
+})
+
+test('retry fails after non-retryable status code', async () => {
+  await testRetryExpectingError([
+    {
+      statusCode: 500,
+      result: null
+    },
+    {
+      statusCode: 200,
+      result: 'Ok'
+    }
+  ])
+})
+
+test('retry works after error', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 999,
+        result: null
+      },
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    'Ok'
+  )
+})
+
+test('retry returns after client error', async () => {
+  await testRetryExpectingResult(
+    [
+      {
+        statusCode: 400,
+        result: null
+      },
+      {
+        statusCode: 200,
+        result: 'Ok'
+      }
+    ],
+    null
+  )
+})
@@ -135,7 +135,7 @@ test('save with server error should fail', async () => {
   )
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
 
@@ -176,6 +176,6 @@ test('save with valid inputs uploads a cache', async () => {
   )
 
   expect(saveCacheMock).toHaveBeenCalledTimes(1)
-  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile)
+  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
   expect(getCompressionMock).toHaveBeenCalledTimes(1)
 })
@@ -3,6 +3,7 @@ import * as path from 'path'
 import * as utils from './internal/cacheUtils'
 import * as cacheHttpClient from './internal/cacheHttpClient'
 import {createTar, extractTar} from './internal/tar'
+import {UploadOptions} from './options'
 
 function checkPaths(paths: string[]): void {
   if (!paths || paths.length === 0) {
@@ -102,9 +103,14 @@ export async function restoreCache(
  *
  * @param paths a list of file paths to be cached
  * @param key an explicit key for restoring the cache
+ * @param options cache upload options
  * @returns number returns cacheId if the cache was saved successfully
  */
-export async function saveCache(paths: string[], key: string): Promise<number> {
+export async function saveCache(
+  paths: string[],
+  key: string,
+  options?: UploadOptions
+): Promise<number> {
   checkPaths(paths)
   checkKey(key)
 
@@ -147,7 +153,7 @@ export async function saveCache(paths: string[], key: string): Promise<number> {
   }
 
   core.debug(`Saving Cache (ID: ${cacheId})`)
-  await cacheHttpClient.saveCache(cacheId, archivePath)
+  await cacheHttpClient.saveCache(cacheId, archivePath, options)
 
   return cacheId
 }
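With this change, callers of `@actions/cache` can pass upload options through the public `saveCache` API. A minimal usage sketch follows; the paths and key below are illustrative placeholders, not values from this commit, and both option fields remain optional (omitting them keeps the previous defaults of 4 parallel uploads and 32 MB chunks):

```typescript
// Hypothetical consumer of the widened saveCache signature shown above.
import * as cache from '@actions/cache'

async function run(): Promise<void> {
  const paths = ['node_modules'] // example path, not from this commit
  const key = `deps-${process.platform}` // example key, not from this commit

  // Tune the parallel upload behavior via the new UploadOptions parameter.
  const cacheId = await cache.saveCache(paths, key, {
    uploadConcurrency: 8, // example override of the default 4
    uploadChunkSize: 16 * 1024 * 1024 // example override of the default 32 MB
  })

  console.log(`Saved cache with id ${cacheId}`)
}

run().catch(error => console.error(error))
```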
@@ -20,6 +20,7 @@ import {
   ReserveCacheRequest,
   ReserveCacheResponse
 } from './contracts'
+import {UploadOptions} from '../options'
 
 const versionSalt = '1.0'
 
@@ -30,6 +31,13 @@ function isSuccessStatusCode(statusCode?: number): boolean {
   return statusCode >= 200 && statusCode < 300
 }
 
+function isServerErrorStatusCode(statusCode?: number): boolean {
+  if (!statusCode) {
+    return true
+  }
+  return statusCode >= 500
+}
+
 function isRetryableStatusCode(statusCode?: number): boolean {
   if (!statusCode) {
     return false
@@ -100,6 +108,75 @@ export function getCacheVersion(
     .digest('hex')
 }
 
+export async function retry<T>(
+  name: string,
+  method: () => Promise<T>,
+  getStatusCode: (arg0: T) => number | undefined,
+  maxAttempts = 2
+): Promise<T> {
+  let response: T | undefined = undefined
+  let statusCode: number | undefined = undefined
+  let isRetryable = false
+  let errorMessage = ''
+  let attempt = 1
+
+  while (attempt <= maxAttempts) {
+    try {
+      response = await method()
+      statusCode = getStatusCode(response)
+
+      if (!isServerErrorStatusCode(statusCode)) {
+        return response
+      }
+
+      isRetryable = isRetryableStatusCode(statusCode)
+      errorMessage = `Cache service responded with ${statusCode}`
+    } catch (error) {
+      isRetryable = true
+      errorMessage = error.message
+    }
+
+    core.debug(
+      `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
+    )
+
+    if (!isRetryable) {
+      core.debug(`${name} - Error is not retryable`)
+      break
+    }
+
+    attempt++
+  }
+
+  throw Error(`${name} failed: ${errorMessage}`)
+}
+
+export async function retryTypedResponse<T>(
+  name: string,
+  method: () => Promise<ITypedResponse<T>>,
+  maxAttempts = 2
+): Promise<ITypedResponse<T>> {
+  return await retry(
+    name,
+    method,
+    (response: ITypedResponse<T>) => response.statusCode,
+    maxAttempts
+  )
+}
+
+export async function retryHttpClientResponse<T>(
+  name: string,
+  method: () => Promise<IHttpClientResponse>,
+  maxAttempts = 2
+): Promise<IHttpClientResponse> {
+  return await retry(
+    name,
+    method,
+    (response: IHttpClientResponse) => response.message.statusCode,
+    maxAttempts
+  )
+}
+
 export async function getCacheEntry(
   keys: string[],
   paths: string[],
@@ -111,8 +188,8 @@ export async function getCacheEntry(
     keys.join(',')
   )}&version=${version}`
 
-  const response = await httpClient.getJson<ArtifactCacheEntry>(
-    getCacheApiUrl(resource)
+  const response = await retryTypedResponse('getCacheEntry', async () =>
+    httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
   )
   if (response.statusCode === 204) {
     return null
@@ -145,9 +222,12 @@ export async function downloadCache(
   archiveLocation: string,
   archivePath: string
 ): Promise<void> {
-  const writableStream = fs.createWriteStream(archivePath)
+  const writeStream = fs.createWriteStream(archivePath)
   const httpClient = new HttpClient('actions/cache')
-  const downloadResponse = await httpClient.get(archiveLocation)
+  const downloadResponse = await retryHttpClientResponse(
+    'downloadCache',
+    async () => httpClient.get(archiveLocation)
+  )
 
   // Abort download if no traffic received over the socket.
   downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
@@ -155,7 +235,7 @@ export async function downloadCache(
     core.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`)
   })
 
-  await pipeResponseToStream(downloadResponse, writableStream)
+  await pipeResponseToStream(downloadResponse, writeStream)
 
   // Validate download size.
   const contentLengthHeader = downloadResponse.message.headers['content-length']
@@ -187,9 +267,11 @@ export async function reserveCache(
     key,
     version
   }
-  const response = await httpClient.postJson<ReserveCacheResponse>(
-    getCacheApiUrl('caches'),
-    reserveCacheRequest
+  const response = await retryTypedResponse('reserveCache', async () =>
+    httpClient.postJson<ReserveCacheResponse>(
+      getCacheApiUrl('caches'),
+      reserveCacheRequest
+    )
   )
   return response?.result?.cacheId ?? -1
 }
@@ -206,7 +288,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
   httpClient: HttpClient,
   resourceUrl: string,
-  data: NodeJS.ReadableStream,
+  openStream: () => NodeJS.ReadableStream,
   start: number,
   end: number
 ): Promise<void> {
@@ -223,56 +305,31 @@ async function uploadChunk(
     'Content-Range': getContentRange(start, end)
   }
 
-  const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
-    return await httpClient.sendStream(
-      'PATCH',
-      resourceUrl,
-      data,
-      additionalHeaders
-    )
-  }
-
-  const response = await uploadChunkRequest()
-  if (isSuccessStatusCode(response.message.statusCode)) {
-    return
-  }
-
-  if (isRetryableStatusCode(response.message.statusCode)) {
-    core.debug(
-      `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
-    )
-    const retryResponse = await uploadChunkRequest()
-    if (isSuccessStatusCode(retryResponse.message.statusCode)) {
-      return
-    }
-  }
-
-  throw new Error(
-    `Cache service responded with ${response.message.statusCode} during chunk upload.`
+  await retryHttpClientResponse(
+    `uploadChunk (start: ${start}, end: ${end})`,
+    async () =>
+      httpClient.sendStream(
+        'PATCH',
+        resourceUrl,
+        openStream(),
+        additionalHeaders
+      )
   )
 }
 
-function parseEnvNumber(key: string): number | undefined {
-  const value = Number(process.env[key])
-  if (Number.isNaN(value) || value < 0) {
-    return undefined
-  }
-  return value
-}
-
 async function uploadFile(
   httpClient: HttpClient,
   cacheId: number,
-  archivePath: string
+  archivePath: string,
+  options?: UploadOptions
 ): Promise<void> {
   // Upload Chunks
   const fileSize = fs.statSync(archivePath).size
   const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
   const fd = fs.openSync(archivePath, 'r')
 
-  const concurrency = parseEnvNumber('CACHE_UPLOAD_CONCURRENCY') ?? 4 // # of HTTP requests in parallel
-  const MAX_CHUNK_SIZE =
-    parseEnvNumber('CACHE_UPLOAD_CHUNK_SIZE') ?? 32 * 1024 * 1024 // 32 MB Chunks
+  const concurrency = options?.uploadConcurrency ?? 4 // # of HTTP requests in parallel
+  const MAX_CHUNK_SIZE = options?.uploadChunkSize ?? 32 * 1024 * 1024 // 32 MB Chunks
   core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`)
 
   const parallelUploads = [...new Array(concurrency).keys()]
@@ -287,14 +344,26 @@ async function uploadFile(
           const start = offset
           const end = offset + chunkSize - 1
           offset += MAX_CHUNK_SIZE
-          const chunk = fs.createReadStream(archivePath, {
-            fd,
-            start,
-            end,
-            autoClose: false
-          })
 
-          await uploadChunk(httpClient, resourceUrl, chunk, start, end)
+          await uploadChunk(
+            httpClient,
+            resourceUrl,
+            () =>
+              fs
+                .createReadStream(archivePath, {
+                  fd,
+                  start,
+                  end,
+                  autoClose: false
+                })
+                .on('error', error => {
+                  throw new Error(
+                    `Cache upload failed because file read failed with ${error.Message}`
+                  )
+                }),
+            start,
+            end
+          )
         }
       })
     )
@@ -310,20 +379,23 @@ async function commitCache(
   filesize: number
 ): Promise<ITypedResponse<null>> {
   const commitCacheRequest: CommitCacheRequest = {size: filesize}
-  return await httpClient.postJson<null>(
-    getCacheApiUrl(`caches/${cacheId.toString()}`),
-    commitCacheRequest
+  return await retryTypedResponse('commitCache', async () =>
+    httpClient.postJson<null>(
+      getCacheApiUrl(`caches/${cacheId.toString()}`),
+      commitCacheRequest
+    )
   )
 }
 
 export async function saveCache(
   cacheId: number,
-  archivePath: string
+  archivePath: string,
+  options?: UploadOptions
 ): Promise<void> {
   const httpClient = createHttpClient()
 
   core.debug('Upload cache')
-  await uploadFile(httpClient, cacheId, archivePath)
+  await uploadFile(httpClient, cacheId, archivePath, options)
 
   // Commit Cache
   core.debug('Commiting cache')
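The new `retry` helpers centralize the ad-hoc retry that `uploadChunk` previously did itself: retryable 5xx responses (such as 503) and thrown errors are retried up to the default of 2 attempts, non-5xx responses are returned to the caller as-is, and anything else fails with an error. As a rough sketch of the pattern, mirroring how `getCacheEntry`, `reserveCache`, and `commitCache` now wrap their calls; the URL, entry shape, and relative import path here are assumptions for illustration only:

```typescript
// Sketch only: wraps a typed HTTP call with the retry helper added above.
import {HttpClient} from '@actions/http-client'
import {retryTypedResponse} from './internal/cacheHttpClient' // assumed relative path

interface ExampleEntry {
  archiveLocation?: string // placeholder response shape
}

async function fetchEntry(resourceUrl: string): Promise<ExampleEntry | null> {
  const httpClient = new HttpClient('actions/cache')

  // The lambda is re-invoked on every attempt, so each retry issues a fresh request.
  const response = await retryTypedResponse('fetchEntry', async () =>
    httpClient.getJson<ExampleEntry>(resourceUrl)
  )

  return response.result
}
```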
@@ -0,0 +1,17 @@
+/**
+ * Options to control cache upload
+ */
+export interface UploadOptions {
+  /**
+   * Number of parallel cache upload
+   *
+   * @default 4
+   */
+  uploadConcurrency?: number
+  /**
+   * Maximum chunk size for cache upload
+   *
+   * @default 32MB
+   */
+  uploadChunkSize?: number
+}
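Note that this commit drops the `CACHE_UPLOAD_CONCURRENCY` and `CACHE_UPLOAD_CHUNK_SIZE` environment-variable overrides (the removed `parseEnvNumber` helper) in favor of these options. A caller that still wants environment-driven tuning could rebuild that mapping on its own side; a sketch reusing the removed variable names, where the mapping itself is illustrative and not part of this change:

```typescript
// Sketch of re-creating the removed env-var overrides on the caller side.
// The env variable names match the ones deleted from the internal HTTP client.

function parseEnvNumber(key: string): number | undefined {
  const value = Number(process.env[key])
  if (Number.isNaN(value) || value < 0) {
    return undefined
  }
  return value
}

// Shape matches the new UploadOptions interface (both fields optional).
function uploadOptionsFromEnv(): {
  uploadConcurrency?: number
  uploadChunkSize?: number
} {
  return {
    uploadConcurrency: parseEnvNumber('CACHE_UPLOAD_CONCURRENCY'), // default stays 4
    uploadChunkSize: parseEnvNumber('CACHE_UPLOAD_CHUNK_SIZE') // default stays 32 MB
  }
}
```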