mirror of https://github.com/actions/toolkit
GZip fixes
parent
0d66a767df
commit
274f7e660d
|
@ -200,33 +200,37 @@ export class UploadHttpClient {
|
||||||
// file is less than 64k in size, to increase throughput and minimize disk I/O for creating a new GZip file, an in-memory buffer will be used
|
// file is less than 64k in size, to increase throughput and minimize disk I/O for creating a new GZip file, an in-memory buffer will be used
|
||||||
if (originalFileSize < 65536) {
|
if (originalFileSize < 65536) {
|
||||||
const buffer = await this.CreateGZipFileInBuffer(parameters.file)
|
const buffer = await this.CreateGZipFileInBuffer(parameters.file)
|
||||||
|
let uploadStream: NodeJS.ReadableStream
|
||||||
uploadFileSize = buffer.byteLength
|
uploadFileSize = buffer.byteLength
|
||||||
|
|
||||||
if (originalFileSize < uploadFileSize) {
|
if (originalFileSize < uploadFileSize) {
|
||||||
// compression did not help with reducing the size, use the original file for upload
|
// compression did not help with reducing the size, use a readable stream from the original file for upload
|
||||||
uploadFileSize = originalFileSize
|
uploadFileSize = originalFileSize
|
||||||
isGzip = false
|
isGzip = false
|
||||||
|
uploadStream = fs.createReadStream(parameters.file)
|
||||||
|
} else {
|
||||||
|
// Create a readable stream using a PassThrough stream and the in-memory buffer. A PassThrough stream is both a readable stream and writable stream
|
||||||
|
const passThrough = new stream.PassThrough()
|
||||||
|
passThrough.end(buffer)
|
||||||
|
uploadStream = passThrough
|
||||||
}
|
}
|
||||||
|
|
||||||
// if using a buffer, the entire file should be uploaded in a single chunk with a single call
|
// the entire file should be uploaded in a single chunk with a single call
|
||||||
if (uploadFileSize > parameters.maxChunkSize) {
|
if (uploadFileSize > parameters.maxChunkSize) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
'Chunk size is too large to upload using buffer with a single call'
|
'Chunk size is too large to upload using buffer with a single call'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const chunkSize = Math.min(
|
const chunkSize = Math.min(
|
||||||
uploadFileSize - offset,
|
uploadFileSize - offset,
|
||||||
parameters.maxChunkSize
|
parameters.maxChunkSize
|
||||||
)
|
)
|
||||||
|
|
||||||
// Create a readable stream using a PassThrough stream and the in-memory buffer
|
|
||||||
const passThrough = new stream.PassThrough()
|
|
||||||
passThrough.end(buffer)
|
|
||||||
|
|
||||||
const result = await this.uploadChunk(
|
const result = await this.uploadChunk(
|
||||||
httpClientIndex,
|
httpClientIndex,
|
||||||
parameters.resourceUrl,
|
parameters.resourceUrl,
|
||||||
passThrough,
|
uploadStream,
|
||||||
offset,
|
offset,
|
||||||
offset + chunkSize - 1,
|
offset + chunkSize - 1,
|
||||||
uploadFileSize,
|
uploadFileSize,
|
||||||
|
|
Loading…
Reference in New Issue