
Merge pull request #1030 from actions/users/ashwinsangem/fix_download_chunk_cap

Cap the cache download chunk to 2 GB
Ashwin Sangem 2022-03-24 19:11:52 +05:30 committed by GitHub
commit 39b9640642
5 changed files with 97 additions and 20041 deletions

package-lock.json (generated), 20124 changed lines

File diff suppressed because it is too large.

packages/cache/RELEASES.md

@@ -49,4 +49,7 @@
 - Use @azure/storage-blob v12.8.0

 ### 1.0.10
 - Update `lockfileVersion` to `v2` in `package-lock.json [#1022](https://github.com/actions/toolkit/pull/1022)
+
+### 1.0.11
+- Fix file downloads > 2GB([issue](https://github.com/actions/cache/issues/773))

packages/cache/package-lock.json (generated, vendored), 4 changed lines

@@ -1,12 +1,12 @@
 {
   "name": "@actions/cache",
-  "version": "1.0.10",
+  "version": "1.0.11",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/cache",
-      "version": "1.0.10",
+      "version": "1.0.11",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.2.6",

packages/cache/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@actions/cache",
-  "version": "1.0.10",
+  "version": "1.0.11",
   "preview": true,
   "description": "Actions cache lib",
   "keywords": [

packages/cache/src/internal/downloadUtils.ts

@@ -240,7 +240,8 @@ export async function downloadCacheStorageSDK(
     //
     // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
     // on 64-bit systems), split the download into multiple segments
-    const maxSegmentSize = buffer.constants.MAX_LENGTH
+    // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly.
+    const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH)
     const downloadProgress = new DownloadProgress(contentLength)
     const fd = fs.openSync(archivePath, 'w')
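
For context, the sketch below shows how a segment size capped this way can drive a segmented Azure blob download. It is only an illustration, not the toolkit's actual implementation: the real downloadCacheStorageSDK also wires in DownloadProgress reporting and DownloadOptions (concurrency, timeouts), and the standalone function name here is made up.

import * as fs from 'fs'
import {constants} from 'buffer'
import {BlockBlobClient} from '@azure/storage-blob'

// Illustrative sketch: download a blob to disk in segments no larger than
// 2147483647 bytes, so no single in-memory buffer hits the ~2 GB limit.
async function downloadBlobInSegments(
  blobUrl: string,
  destinationPath: string
): Promise<void> {
  const client = new BlockBlobClient(blobUrl)
  const contentLength = (await client.getProperties()).contentLength ?? -1
  if (contentLength < 0) {
    throw new Error('Unable to determine content length')
  }

  // Same capping idea as the patch above: never request more than 2147483647
  // bytes at once, even where buffer.constants.MAX_LENGTH is larger.
  const maxSegmentSize = Math.min(2147483647, constants.MAX_LENGTH)

  const fd = fs.openSync(destinationPath, 'w')
  try {
    let offset = 0
    while (offset < contentLength) {
      const segmentSize = Math.min(maxSegmentSize, contentLength - offset)
      // downloadToBuffer(offset, count) fetches just that byte range
      const segment = await client.downloadToBuffer(offset, segmentSize)
      fs.writeFileSync(fd, segment)
      offset += segmentSize
    }
  } finally {
    fs.closeSync(fd)
  }
}

Using Math.min rather than a bare constant keeps the existing behaviour on 32-bit systems, where buffer.constants.MAX_LENGTH is already below 2 GB, while capping 64-bit systems at 2147483647 bytes.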