diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index 8d8a1668..73a9fcc5 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -1,3 +1,5 @@
+import os from 'os'
+
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
 export function getUploadChunkSize(): number {
@@ -34,3 +36,17 @@ export function getGitHubWorkspaceDir(): string {
   }
   return ghWorkspaceDir
 }
+
+// Mimics the behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If the machine has fewer than 5 CPUs, the concurrency is set to 32.
+// Otherwise it is 16 times the number of CPUs, capped at a maximum of 300.
+export function getConcurrency(): number {
+  const numCPUs = os.cpus().length
+
+  if (numCPUs <= 4) {
+    return 32
+  }
+
+  const concurrency = 16 * numCPUs
+  return concurrency > 300 ? 300 : concurrency
+}
diff --git a/packages/artifact/src/internal/shared/interfaces.ts b/packages/artifact/src/internal/shared/interfaces.ts
index 7e3e862f..b160d1be 100644
--- a/packages/artifact/src/internal/shared/interfaces.ts
+++ b/packages/artifact/src/internal/shared/interfaces.ts
@@ -38,6 +38,17 @@ export interface UploadOptions {
    * input of 0 assumes default retention setting.
    */
   retentionDays?: number
+  /**
+   * The level of compression for Zlib to be applied to the artifact archive.
+   * The value can range from 0 to 9:
+   * - 0: No compression
+   * - 1: Best speed
+   * - 6: Default compression (same as GNU Gzip)
+   * - 9: Best compression
+   * Higher levels will result in better compression, but will take longer to complete.
+   * For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
+   */
+  compressionLevel?: number
 }
 
 /*****************************************************************************
diff --git a/packages/artifact/src/internal/upload/blob-upload.ts b/packages/artifact/src/internal/upload/blob-upload.ts
index 2bed1f39..cb7a11b7 100644
--- a/packages/artifact/src/internal/upload/blob-upload.ts
+++ b/packages/artifact/src/internal/upload/blob-upload.ts
@@ -1,7 +1,7 @@
 import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import {TransferProgressEvent} from '@azure/core-http'
 import {ZipUploadStream} from './zip'
-import {getUploadChunkSize} from '../shared/config'
+import {getUploadChunkSize, getConcurrency} from '../shared/config'
 import * as core from '@actions/core'
 import * as crypto from 'crypto'
 import * as stream from 'stream'
@@ -29,13 +29,13 @@ export async function uploadZipToBlobStorage(
 ): Promise<BlobUploadResponse> {
   let uploadByteCount = 0
 
-  const maxBuffers = 5
+  const maxConcurrency = getConcurrency()
   const bufferSize = getUploadChunkSize()
   const blobClient = new BlobClient(authenticatedUploadURL)
   const blockBlobClient = blobClient.getBlockBlobClient()
 
   core.debug(
-    `Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`
+    `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
   )
 
   const uploadCallback = (progress: TransferProgressEvent): void => {
@@ -61,7 +61,7 @@ export async function uploadZipToBlobStorage(
   await blockBlobClient.uploadStream(
     uploadStream,
     bufferSize,
-    maxBuffers,
+    maxConcurrency,
     options
   )
 
diff --git a/packages/artifact/src/internal/upload/upload-artifact.ts b/packages/artifact/src/internal/upload/upload-artifact.ts
index ced05568..f2547516 100644
--- a/packages/artifact/src/internal/upload/upload-artifact.ts
+++ b/packages/artifact/src/internal/upload/upload-artifact.ts
@@ -37,7 +37,7 @@ export async function uploadArtifact(
     }
   }
 
-  const zipUploadStream = await createZipUploadStream(zipSpecification)
+  const zipUploadStream = await createZipUploadStream(zipSpecification, options?.compressionLevel)
 
   // get the IDs needed for the artifact creation
   const backendIds = getBackendIdsFromToken()
diff --git a/packages/artifact/src/internal/upload/zip.ts b/packages/artifact/src/internal/upload/zip.ts
index 81a12343..d8a5e5b7 100644
--- a/packages/artifact/src/internal/upload/zip.ts
+++ b/packages/artifact/src/internal/upload/zip.ts
@@ -5,6 +5,8 @@ import {createReadStream} from 'fs'
 import {UploadZipSpecification} from './upload-zip-specification'
 import {getUploadChunkSize} from '../shared/config'
 
+export const DEFAULT_COMPRESSION_LEVEL = 6
+
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
 export class ZipUploadStream extends stream.Transform {
@@ -21,14 +23,12 @@ export class ZipUploadStream extends stream.Transform {
 }
 
 export async function createZipUploadStream(
-  uploadSpecification: UploadZipSpecification[]
+  uploadSpecification: UploadZipSpecification[],
+  compressionLevel: number = DEFAULT_COMPRESSION_LEVEL
 ): Promise<ZipUploadStream> {
   const zip = archiver.create('zip', {
-    zlib: {level: 9} // Sets the compression level.
-    // Available options are 0-9
-    // 0 => no compression
-    // 1 => fastest with low compression
-    // 9 => highest compression ratio but the slowest
+    highWaterMark: getUploadChunkSize(),
+    zlib: {level: compressionLevel}
   })
 
   // register callbacks for various events during the zip lifecycle
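
For reviewers, a small standalone sketch of the azcopy-style heuristic that `getConcurrency()` introduces above. `concurrencyFor` is a hypothetical helper used only to show the arithmetic for a few machine sizes; the real code reads `os.cpus().length` directly.

```ts
// Illustration only (not part of the diff): the concurrency chosen for a few CPU counts.
function concurrencyFor(numCPUs: number): number {
  if (numCPUs <= 4) {
    return 32 // small runners get a fixed floor of 32 concurrent block uploads
  }
  const concurrency = 16 * numCPUs
  return concurrency > 300 ? 300 : concurrency // cap at 300, matching azcopy
}

for (const cpus of [2, 4, 8, 16, 32]) {
  console.log(`${cpus} CPUs -> concurrency ${concurrencyFor(cpus)}`)
}
// 2 -> 32, 4 -> 32, 8 -> 128, 16 -> 256, 32 -> 300 (capped)
```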
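
And a hypothetical consumer-side sketch of the new `compressionLevel` option. The default-export client and the `uploadArtifact(name, files, rootDirectory, options)` call shape are assumptions about the existing `@actions/artifact` surface rather than something introduced by this diff; only the `compressionLevel` field is new.

```ts
// Hypothetical usage sketch (not part of the diff). The import style and the
// uploadArtifact signature are assumed from the existing @actions/artifact client;
// compressionLevel is the UploadOptions field added in interfaces.ts above.
import artifactClient from '@actions/artifact'

async function run(): Promise<void> {
  await artifactClient.uploadArtifact(
    'release-bundle',
    ['dist/app.tar.gz'], // already compressed, so zlib adds little value
    process.cwd(),
    {
      retentionDays: 7,
      // 0 disables zlib compression for a significantly faster upload of
      // incompressible payloads; omit it to fall back to the default level of 6.
      compressionLevel: 0
    }
  )
}

run().catch(err => {
  console.error(err)
  process.exitCode = 1
})
```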