mirror of https://github.com/actions/toolkit
Merge pull request #9 from WarpBuilds/hotfix-concurrency-limits-uploads
hotfix: concurrency limits in file uploads
commit e85e3f8677
@@ -1,6 +1,6 @@
 {
   "name": "github-actions.warp-cache",
-  "version": "1.2.2",
+  "version": "1.2.5",
   "preview": true,
   "description": "Github action to use WarpBuild's in-house cache offering",
   "keywords": [
@@ -124,6 +124,11 @@ export function getCacheVersion(
     components.push('windows-only')
   }
 
+  // Check for mac platforms if enableCrossOsArchive is false
+  if (process.platform === 'darwin' && !enableCrossOsArchive) {
+    components.push('mac-only')
+  }
+
   // Add architecture to cache version
   if (!enableCrossArchArchive) {
     components.push(process.arch)
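For context, the cache version in this file is built from a list of string components (paths, compression method, OS and architecture markers), and the hunk above adds a macOS marker so Mac caches are not shared cross-OS. A minimal standalone sketch of that idea, assuming a SHA-256 hash over the joined components; the helper name and exact parameters are illustrative, not the warp-cache source:

import * as crypto from 'crypto'

// Illustrative sketch only: mirrors the component-based versioning above,
// with the final hashing step assumed rather than taken from the source.
function sketchCacheVersion(
  paths: string[],
  compressionMethod?: string,
  enableCrossOsArchive = false,
  enableCrossArchArchive = false
): string {
  const components = [...paths]
  if (compressionMethod) {
    components.push(compressionMethod)
  }
  // Keep the cache OS-specific unless cross-OS archives are enabled
  if (process.platform === 'win32' && !enableCrossOsArchive) {
    components.push('windows-only')
  }
  if (process.platform === 'darwin' && !enableCrossOsArchive) {
    components.push('mac-only')
  }
  // Keep the cache architecture-specific unless cross-arch archives are enabled
  if (!enableCrossArchArchive) {
    components.push(process.arch)
  }
  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}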
@@ -1,5 +1,6 @@
 import * as core from '@actions/core'
 import * as utils from './cacheUtils'
+import * as os from 'os'
 
 import fs from 'fs'
 
@@ -52,9 +53,8 @@ async function uploadChunk(
     }
   } catch (error) {
     throw new Error(
-      `Cache service responded with ${
-        (error as AxiosError).status
-      } during upload chunk.`
+      `Cache service responded with ${(error as AxiosError).response
+        ?.status} during upload chunk.`
     )
   }
 }
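The reworded error above switches from the error's top-level `status` field to `response?.status`: an Axios error only carries a populated `response` when the server actually replied, so the optional chain keeps the message construction itself from throwing on network-level failures. A minimal sketch of the same guard (the helper name is illustrative, not part of the source):

import {AxiosError} from 'axios'

// Illustrative helper: read the HTTP status defensively, as the patch does.
function describeChunkFailure(error: unknown): string {
  const status = (error as AxiosError).response?.status
  return `Cache service responded with ${status} during upload chunk.`
}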
@@ -66,20 +66,31 @@ export async function uploadFileToS3(
   const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
   const numberOfChunks = preSignedURLs.length
 
+  let concurrency = 4
+  // Adjust concurrency based on the number of cpu cores
+  if (os.cpus().length > 4) {
+    concurrency = 8
+  }
+
   const fd = fs.openSync(archivePath, 'r')
 
-  core.debug('Awaiting all uploads')
+  core.debug(`Awaiting all uploads with concurrency limit of ${concurrency}`)
   let offset = 0
 
+  const completedParts: InternalS3CompletedPart[] = []
+
   try {
-    const completedParts = await Promise.all(
-      preSignedURLs.map(async (presignedURL, index) => {
+    for (let i = 0; i < numberOfChunks; i += concurrency) {
+      const batch = preSignedURLs
+        .slice(i, i + concurrency)
+        .map((presignedURL, index) => {
+          const chunkIndex = i + index
           const chunkSize = Math.ceil(fileSize / numberOfChunks)
           const start = offset
           const end = offset + chunkSize - 1
           offset += chunkSize
 
-          return await uploadChunk(
+          return uploadChunk(
             presignedURL,
             () =>
               fs
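The chunk geometry itself is unchanged: every part is `Math.ceil(fileSize / numberOfChunks)` bytes and `start`/`end` are inclusive byte offsets into the archive. A worked example with made-up numbers:

// Illustrative numbers only: a 10-byte archive split across 3 pre-signed URLs.
// chunkSize = Math.ceil(10 / 3) = 4
// part 1 reads bytes 0-3, part 2 reads 4-7, part 3 is assigned 8-11,
// but the read stream simply stops at the file's last byte (offset 9).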
@@ -94,13 +105,15 @@ export async function uploadFileToS3(
                   `Cache upload failed because file read failed with ${error.message}`
                 )
               }),
-            index + 1,
+            chunkIndex + 1,
             start,
             end
           )
         })
-    )
 
+      const batchResults = await Promise.all(batch)
+      completedParts.push(...batchResults)
+    }
     return completedParts
   } finally {
     fs.closeSync(fd)
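Taken together, the upload changes replace a single Promise.all over every pre-signed URL with fixed-size batches, so at most `concurrency` chunk uploads (and open read streams) are in flight at a time. A minimal standalone sketch of that pattern, with generic names (`uploadInBatches`, `uploadPart`, `urls`) that are illustrative rather than the warp-cache API:

// Illustrative sketch of batched concurrency, assuming an async uploadPart(url, index) helper.
async function uploadInBatches<T>(
  urls: string[],
  concurrency: number,
  uploadPart: (url: string, index: number) => Promise<T>
): Promise<T[]> {
  const results: T[] = []
  for (let i = 0; i < urls.length; i += concurrency) {
    // Start at most `concurrency` uploads, wait for the whole batch, then continue.
    const batch = urls
      .slice(i, i + concurrency)
      .map((url, offsetInBatch) => uploadPart(url, i + offsetInBatch))
    results.push(...(await Promise.all(batch)))
  }
  return results
}

One trade-off of batch-by-batch waiting (versus a sliding-window pool) is that a single slow chunk stalls its whole batch, but it keeps the diff small and bounds memory and socket usage in the same way the patch does.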