
Merge pull request #1584 from actions/robherley/upload-v4-improvements

Increase Artifact v4 upload speed
Rob Herley 2023-11-20 16:30:50 -05:00 committed by GitHub
commit 0407266511
7 changed files with 52 additions and 17 deletions

package-lock.json (generated)

@@ -3262,9 +3262,9 @@
       }
     },
     "node_modules/axios": {
-      "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
-      "integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
+      "version": "1.6.2",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
+      "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
       "dev": true,
       "dependencies": {
         "follow-redirects": "^1.15.0",


@@ -52,8 +52,8 @@ class ArtifactHttpClient implements Rpc {
     contentType: 'application/json' | 'application/protobuf',
     data: object | Uint8Array
   ): Promise<object | Uint8Array> {
-    const url = `${this.baseUrl}/twirp/${service}/${method}`
-    debug(`Requesting ${url}`)
+    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
+    debug(`Requesting: ${url}`)
    const headers = {
      'Content-Type': contentType
    }
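The template-string join produced a malformed URL whenever `baseUrl` ended with a slash; the WHATWG `URL` constructor normalizes that case. A standalone sketch of the difference (the host below is hypothetical):

const baseUrl = 'https://results.example.com/'

// Old: naive interpolation keeps the double slash
console.log(`${baseUrl}/twirp/Service/Method`)
// https://results.example.com//twirp/Service/Method

// New: resolving the path against the base yields a clean absolute URL
console.log(new URL('/twirp/Service/Method', baseUrl).href)
// https://results.example.com/twirp/Service/Method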


@@ -1,3 +1,5 @@
+import os from 'os'
+
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is used during upload to blob storage
 export function getUploadChunkSize(): number {
@@ -17,7 +19,8 @@ export function getResultsServiceUrl(): string {
   if (!resultsUrl) {
     throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable')
   }
-  return resultsUrl
+
+  return new URL(resultsUrl).origin
 }
 
 export function isGhes(): boolean {
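Returning `origin` rather than the raw env value strips any path, query, or trailing slash from `ACTIONS_RESULTS_URL` before the Twirp path is appended. For instance (hypothetical value):

new URL('https://results.example.com/some/path?token=1').origin
// 'https://results.example.com'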
@@ -34,3 +37,17 @@ export function getGitHubWorkspaceDir(): string {
   }
   return ghWorkspaceDir
 }
+
+// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
+// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+export function getConcurrency(): number {
+  const numCPUs = os.cpus().length
+
+  if (numCPUs <= 4) {
+    return 32
+  }
+
+  const concurrency = 16 * numCPUs
+  return concurrency > 300 ? 300 : concurrency
+}
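The piecewise rule is easiest to see with a few concrete machine sizes. A self-contained re-implementation for illustration only (not part of the diff):

import assert from 'assert'

// Same rule as getConcurrency(), parameterized by CPU count
const concurrencyFor = (cpus: number): number =>
  cpus <= 4 ? 32 : Math.min(16 * cpus, 300)

assert.strictEqual(concurrencyFor(2), 32)   // fewer than 5 CPUs -> flat 32
assert.strictEqual(concurrencyFor(8), 128)  // 16 * 8
assert.strictEqual(concurrencyFor(32), 300) // 16 * 32 = 512, capped at 300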


@@ -38,6 +38,17 @@ export interface UploadOptions {
    * input of 0 assumes default retention setting.
    */
   retentionDays?: number
+  /**
+   * The level of compression for Zlib to be applied to the artifact archive.
+   * The value can range from 0 to 9:
+   * - 0: No compression
+   * - 1: Best speed
+   * - 6: Default compression (same as GNU Gzip)
+   * - 9: Best compression
+   * Higher levels will result in better compression, but will take longer to complete.
+   * For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
+   */
+  compressionLevel?: number
 }
 
 /*****************************************************************************
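From a caller's perspective, the new option flows through `UploadOptions`. A hedged usage sketch, assuming the package's `DefaultArtifactClient` export and its `uploadArtifact(name, files, rootDirectory, options)` signature:

import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

// Already-compressed inputs (tarballs, images, videos) gain little from
// zlib, so level 0 trades archive size for a much faster upload.
await artifact.uploadArtifact(
  'build-output',
  ['dist/app.tar.gz'],
  process.cwd(),
  {compressionLevel: 0}
)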


@@ -1,7 +1,7 @@
 import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import {TransferProgressEvent} from '@azure/core-http'
 import {ZipUploadStream} from './zip'
-import {getUploadChunkSize} from '../shared/config'
+import {getUploadChunkSize, getConcurrency} from '../shared/config'
 import * as core from '@actions/core'
 import * as crypto from 'crypto'
 import * as stream from 'stream'
@@ -29,13 +29,13 @@ export async function uploadZipToBlobStorage(
 ): Promise<BlobUploadResponse> {
   let uploadByteCount = 0
 
-  const maxBuffers = 5
+  const maxConcurrency = getConcurrency()
   const bufferSize = getUploadChunkSize()
   const blobClient = new BlobClient(authenticatedUploadURL)
   const blockBlobClient = blobClient.getBlockBlobClient()
 
   core.debug(
-    `Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`
+    `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
   )
 
   const uploadCallback = (progress: TransferProgressEvent): void => {
@@ -61,7 +61,7 @@ export async function uploadZipToBlobStorage(
   await blockBlobClient.uploadStream(
     uploadStream,
     bufferSize,
-    maxBuffers,
+    maxConcurrency,
     options
   )
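One consequence of the higher ceiling: `uploadStream` can hold up to `bufferSize * maxConcurrency` bytes in flight. Assuming `getUploadChunkSize()` still returns 8 MiB (its value elsewhere in the package, not shown in this diff), the bounds work out to:

// Worst case on a large runner: 8 MiB * 300 buffers = 2400 MiB (≈ 2.3 GiB)
// Small runners (<= 4 CPUs):    8 MiB * 32 buffers  = 256 MiB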


@@ -37,7 +37,10 @@ export async function uploadArtifact(
     }
   }
 
-  const zipUploadStream = await createZipUploadStream(zipSpecification)
+  const zipUploadStream = await createZipUploadStream(
+    zipSpecification,
+    options?.compressionLevel
+  )
 
   // get the IDs needed for the artifact creation
   const backendIds = getBackendIdsFromToken()
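Note that `options?.compressionLevel` evaluates to `undefined` when the caller sets nothing, and an `undefined` argument triggers the default parameter in `createZipUploadStream`, so the two calls below behave identically (sketch; `spec` is a placeholder):

await createZipUploadStream(spec)            // compresses at level 6
await createZipUploadStream(spec, undefined) // default parameter also applies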


@@ -5,6 +5,8 @@ import {createReadStream} from 'fs'
 import {UploadZipSpecification} from './upload-zip-specification'
 import {getUploadChunkSize} from '../shared/config'
 
+export const DEFAULT_COMPRESSION_LEVEL = 6
+
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
 export class ZipUploadStream extends stream.Transform {
@@ -21,14 +23,16 @@ export class ZipUploadStream extends stream.Transform {
 }
 
 export async function createZipUploadStream(
-  uploadSpecification: UploadZipSpecification[]
+  uploadSpecification: UploadZipSpecification[],
+  compressionLevel: number = DEFAULT_COMPRESSION_LEVEL
 ): Promise<ZipUploadStream> {
+  core.debug(
+    `Creating Artifact archive with compressionLevel: ${compressionLevel}`
+  )
+
   const zip = archiver.create('zip', {
-    zlib: {level: 9} // Sets the compression level.
-    // Available options are 0-9
-    // 0 => no compression
-    // 1 => fastest with low compression
-    // 9 => highest compression ratio but the slowest
+    highWaterMark: getUploadChunkSize(),
+    zlib: {level: compressionLevel}
   })
 
   // register callbacks for various events during the zip lifecycle
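Taken together, the upload path introduced by this PR reads roughly as follows (a condensed sketch stitched from the hunks above; error handling and the Twirp calls are elided):

// 1. Zip the files at the requested (or default) compression level,
//    with the stream's highWaterMark matched to the upload chunk size
const zipUploadStream = await createZipUploadStream(
  zipSpecification,
  options?.compressionLevel
)

// 2. Stream the zip to blob storage with CPU-scaled parallelism:
//    getConcurrency() buffers of getUploadChunkSize() bytes each
const uploadResult = await uploadZipToBlobStorage(
  authenticatedUploadURL,
  zipUploadStream
)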