
@actions/cache: use concurrent download whenever the server supports it (via the `Accept-Ranges` header)

pull/1835/head
Đặng Minh Dũng 2024-09-30 15:40:01 +07:00
parent 6dd369c0e6
commit 732400a59e
GPG Key ID: 30B00965500D018B
2 changed files with 46 additions and 35 deletions
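
In short, `downloadCache` now probes the archive URL with a HEAD request and only takes the concurrent `HttpClient` path when the server advertises `Accept-Ranges: bytes` together with a usable `Content-Length`; otherwise it falls back to the plain Actions http-client download. A condensed sketch of that probe, assuming the `@actions/http-client` API (the helper name `probeRangeSupport` is illustrative and not part of this commit):

```typescript
import {HttpClient} from '@actions/http-client'

// Illustrative helper: HEAD the archive URL and report whether ranged
// (partial) downloads are possible and how large the archive is.
async function probeRangeSupport(
  archiveLocation: string
): Promise<{acceptRange: boolean; contentLength: number}> {
  const httpClient = new HttpClient('actions/cache')
  const res = await httpClient.request('HEAD', archiveLocation, null, {})
  // Node exposes incoming header names in lowercase on IncomingMessage.headers.
  const acceptRange = res.message.headers['accept-ranges'] === 'bytes'
  const contentLength = parseInt(res.message.headers['content-length'] ?? '-1', 10)
  return {acceptRange, contentLength}
}
```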

View File

@@ -13,12 +13,12 @@ import * as utils from './cacheUtils'
 import {CompressionMethod} from './constants'
 import {
   ArtifactCacheEntry,
-  InternalCacheOptions,
+  ArtifactCacheList,
   CommitCacheRequest,
-  ReserveCacheRequest,
-  ReserveCacheResponse,
+  InternalCacheOptions,
   ITypedResponseWithError,
-  ArtifactCacheList
+  ReserveCacheRequest,
+  ReserveCacheResponse
 } from './contracts'
 import {
   downloadCacheHttpClient,
@@ -27,9 +27,9 @@ import {
 } from './downloadUtils'
 import {
   DownloadOptions,
-  UploadOptions,
   getDownloadOptions,
-  getUploadOptions
+  getUploadOptions,
+  UploadOptions
 } from '../options'
 import {
   isSuccessStatusCode,
@@ -179,22 +179,45 @@ export async function downloadCache(
   if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
     if (downloadOptions.useAzureSdk) {
       // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
-      await downloadCacheStorageSDK(
+      return await downloadCacheStorageSDK(
         archiveLocation,
         archivePath,
         downloadOptions
       )
-    } else if (downloadOptions.concurrentBlobDownloads) {
+    }
+
+    let acceptRange = false
+    let contentLength = -1
+    // Determine partial file downloads is supported by server
+    // via `Accept-Ranges: bytes` response header.
+    try {
+      const httpClient = new HttpClient('actions/cache', undefined, {
+        socketTimeout: downloadOptions.timeoutInMs,
+        keepAlive: true
+      })
+      const res = await retryHttpClientResponse(
+        'downloadCacheMetadata',
+        async () => await httpClient.request('HEAD', archiveLocation, null, {})
+      )
+      acceptRange = res.message.headers['Accept-Ranges'] === 'bytes'
+      const lengthHeader = res.message.headers['Content-Length']
+      contentLength = parseInt(lengthHeader)
+    } catch {
+      // ignore
+    }
+
+    if (acceptRange && contentLength > 0) {
       // Use concurrent implementation with HttpClient to work around blob SDK issue
       await downloadCacheHttpClientConcurrent(
         archiveLocation,
         archivePath,
+        contentLength,
         downloadOptions
       )
     } else {
       // Otherwise, download using the Actions http-client.
       await downloadCacheHttpClient(archiveLocation, archivePath)
     }
   } else {
     await downloadCacheHttpClient(archiveLocation, archivePath)
   }
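
Once `Accept-Ranges: bytes` support is confirmed, each block (4 MiB, per `downloadUtils` below) is fetched with its own HTTP range request; a real implementation would also retry failed segments and write each one into the archive file at its offset. A rough sketch of a single ranged GET with `@actions/http-client` (the function name and in-memory buffering are illustrative, not the library's actual helper):

```typescript
import {HttpClient} from '@actions/http-client'

// Illustrative: fetch bytes [offset, offset + count) of the archive with a
// single ranged GET and return them as a Buffer.
async function downloadSegmentSketch(
  httpClient: HttpClient,
  archiveLocation: string,
  offset: number,
  count: number
): Promise<Buffer> {
  const res = await httpClient.get(archiveLocation, {
    Range: `bytes=${offset}-${offset + count - 1}`
  })
  const chunks: Buffer[] = []
  for await (const chunk of res.message) {
    chunks.push(chunk as Buffer)
  }
  return Buffer.concat(chunks)
}
```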

View File

@@ -208,10 +208,13 @@ export async function downloadCacheHttpClient(
  *
  * @param archiveLocation the URL for the cache
  * @param archivePath the local path where the cache is saved
+ * @param contentLength
+ * @param options
  */
 export async function downloadCacheHttpClientConcurrent(
   archiveLocation: string,
   archivePath: fs.PathLike,
+  contentLength: number,
   options: DownloadOptions
 ): Promise<void> {
   const archiveDescriptor = await fs.promises.open(archivePath, 'w')
@@ -220,29 +223,14 @@ export async function downloadCacheHttpClientConcurrent(
     keepAlive: true
   })
   try {
-    const res = await retryHttpClientResponse(
-      'downloadCacheMetadata',
-      async () => await httpClient.request('HEAD', archiveLocation, null, {})
-    )
-    const lengthHeader = res.message.headers['content-length']
-    if (lengthHeader === undefined || lengthHeader === null) {
-      throw new Error('Content-Length not found on blob response')
-    }
-    const length = parseInt(lengthHeader)
-    if (Number.isNaN(length)) {
-      throw new Error(`Could not interpret Content-Length: ${length}`)
-    }
-
     const downloads: {
       offset: number
       promiseGetter: () => Promise<DownloadSegment>
     }[] = []

     const blockSize = 4 * 1024 * 1024
-    for (let offset = 0; offset < length; offset += blockSize) {
-      const count = Math.min(blockSize, length - offset)
+    for (let offset = 0; offset < contentLength; offset += blockSize) {
+      const count = Math.min(blockSize, contentLength - offset)
       downloads.push({
         offset,
         promiseGetter: async () => {
@@ -260,7 +248,7 @@ export async function downloadCacheHttpClientConcurrent(
     downloads.reverse()
     let actives = 0
     let bytesDownloaded = 0
-    const progress = new DownloadProgress(length)
+    const progress = new DownloadProgress(contentLength)
     progress.startDisplayTimer()
     const progressFn = progress.onProgress()
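
For reference, the loop above carves `contentLength` into fixed 4 MiB blocks, with the final block holding the remainder, so a 9 MiB archive yields three segments:

```typescript
// Worked example of the 4 MiB block split used above (9 MiB archive).
const blockSize = 4 * 1024 * 1024
const contentLength = 9 * 1024 * 1024
const segments: {offset: number; count: number}[] = []
for (let offset = 0; offset < contentLength; offset += blockSize) {
  segments.push({offset, count: Math.min(blockSize, contentLength - offset)})
}
// segments: [{offset: 0 MiB, count: 4 MiB}, {offset: 4 MiB, count: 4 MiB},
//            {offset: 8 MiB, count: 1 MiB}]
```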