mirror of https://github.com/actions/toolkit
Merge pull request #9 from WarpBuilds/hotfix-concurrency-limits-uploads
hotfix: concurrency limits in file uploads
commit e85e3f8677

@@ -1,6 +1,6 @@
 {
   "name": "github-actions.warp-cache",
-  "version": "1.2.2",
+  "version": "1.2.5",
   "preview": true,
   "description": "Github action to use WarpBuild's in-house cache offering",
   "keywords": [

@@ -124,6 +124,11 @@ export function getCacheVersion(
     components.push('windows-only')
   }
 
+  // Check for mac platforms if enableCrossOsArchive is false
+  if (process.platform === 'darwin' && !enableCrossOsArchive) {
+    components.push('mac-only')
+  }
+
   // Add architecture to cache version
   if (!enableCrossArchArchive) {
     components.push(process.arch)
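
For context, `getCacheVersion` only pushes marker strings here; the platform and architecture components pay off when they are collapsed into one derived version key, so that, say, a darwin/arm64 archive can never be restored onto linux/x64. The diff does not show that final step; the sketch below assumes the SHA-256 join-and-hash scheme that upstream actions/toolkit uses, so treat `cacheVersionFromComponents` as illustrative rather than WarpBuild's actual code.

    import * as crypto from 'crypto'

    // Illustrative only: collapse all version components into a single opaque
    // digest. With 'mac-only' and process.arch in the list, caches written on
    // darwin/arm64 get a different version than ones written on linux/x64.
    function cacheVersionFromComponents(components: string[]): string {
      return crypto
        .createHash('sha256')
        .update(components.join('|'))
        .digest('hex')
    }
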
@@ -1,5 +1,6 @@
 import * as core from '@actions/core'
 import * as utils from './cacheUtils'
+import * as os from 'os'
 
 import fs from 'fs'

@@ -52,9 +53,8 @@ async function uploadChunk(
     }
   } catch (error) {
     throw new Error(
-      `Cache service responded with ${
-        (error as AxiosError).status
-      } during upload chunk.`
+      `Cache service responded with ${(error as AxiosError).response
+        ?.status} during upload chunk.`
     )
   }
 }
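
The one-line fix above targets where axios actually stores the HTTP status: on the error's `response` object, which is only present when the server answered at all, hence the optional chain. The old interpolation of `(error as AxiosError).status` evidently rendered `undefined` in the message, which is what this hotfix corrects. A minimal sketch of the corrected pattern (`describeUploadError` is an illustrative helper, not part of the repo):

    import {AxiosError} from 'axios'

    // Illustrative helper: read the status from error.response, which is
    // undefined when the request failed before any response arrived.
    function describeUploadError(error: unknown): string {
      const status = (error as AxiosError).response?.status
      return `Cache service responded with ${status ?? 'no status'} during upload chunk.`
    }
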
@@ -66,20 +66,31 @@ export async function uploadFileToS3(
   const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
   const numberOfChunks = preSignedURLs.length
 
+  let concurrency = 4
+  // Adjust concurrency based on the number of cpu cores
+  if (os.cpus().length > 4) {
+    concurrency = 8
+  }
+
   const fd = fs.openSync(archivePath, 'r')
 
-  core.debug('Awaiting all uploads')
+  core.debug(`Awaiting all uploads with concurrency limit of ${concurrency}`)
   let offset = 0
 
+  const completedParts: InternalS3CompletedPart[] = []
+
   try {
-    const completedParts = await Promise.all(
-      preSignedURLs.map(async (presignedURL, index) => {
-        const chunkSize = Math.ceil(fileSize / numberOfChunks)
-        const start = offset
-        const end = offset + chunkSize - 1
-        offset += chunkSize
-
-        return await uploadChunk(
-          presignedURL,
-          () =>
-            fs
+    for (let i = 0; i < numberOfChunks; i += concurrency) {
+      const batch = preSignedURLs
+        .slice(i, i + concurrency)
+        .map((presignedURL, index) => {
+          const chunkIndex = i + index
+          const chunkSize = Math.ceil(fileSize / numberOfChunks)
+          const start = offset
+          const end = offset + chunkSize - 1
+          offset += chunkSize
+
+          return uploadChunk(
+            presignedURL,
+            () =>
+              fs
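
This is the heart of the hotfix: instead of firing one request per presigned URL all at once through a single `Promise.all`, the loop now walks the URL list in slices of `concurrency` (4, or 8 on machines with more than 4 cores) and awaits each slice before starting the next. Stripped of the upload specifics, the pattern looks like the generic helper below; `mapInBatches` and its names are illustrative, not from the codebase:

    // Run `worker` over `items` with at most `limit` calls in flight at once.
    async function mapInBatches<T, R>(
      items: T[],
      limit: number,
      worker: (item: T, index: number) => Promise<R>
    ): Promise<R[]> {
      const results: R[] = []
      for (let i = 0; i < items.length; i += limit) {
        // Each slice settles completely before the next begins, so no more
        // than `limit` requests are ever outstanding.
        const batch = items
          .slice(i, i + limit)
          .map((item, j) => worker(item, i + j))
        results.push(...(await Promise.all(batch)))
      }
      return results
    }

One trade-off to note: a batch only finishes when its slowest chunk does, so a single straggler idles the other slots; a sliding-window pool would keep all `concurrency` slots busy continuously, at the cost of more bookkeeping.
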
@@ -94,13 +105,15 @@ export async function uploadFileToS3(
               `Cache upload failed because file read failed with ${error.message}`
             )
           }),
-          index + 1,
+          chunkIndex + 1,
           start,
           end
         )
       })
-    )
+
+      const batchResults = await Promise.all(batch)
+      completedParts.push(...batchResults)
+    }
+    return completedParts
   } finally {
     fs.closeSync(fd)
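
Two details in this final hunk deserve a callout. First, `.map` now runs over a slice, so its `index` restarts at 0 in every batch; `chunkIndex + 1` restores the globally unique, 1-based part number that S3 multipart uploads require. Second, the byte-range arithmetic is unchanged; a worked example with illustrative numbers shows how the ranges and part numbers line up:

    // A 10-byte file split across 4 presigned URLs: Math.ceil(10 / 4) = 3,
    // giving byte ranges 0-2, 3-5, 6-8 and 9-11. The last range overshoots
    // EOF, and the read for it presumably just returns the one remaining byte.
    const fileSize = 10
    const numberOfChunks = 4
    const chunkSize = Math.ceil(fileSize / numberOfChunks) // 3

    let offset = 0
    for (let chunkIndex = 0; chunkIndex < numberOfChunks; chunkIndex++) {
      const start = offset
      const end = offset + chunkSize - 1
      offset += chunkSize
      console.log(`part ${chunkIndex + 1}: bytes ${start}-${end}`)
    }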