diff --git a/package-lock.json b/package-lock.json
index c0fd14ed..e72dedbd 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3262,9 +3262,9 @@
       }
     },
     "node_modules/axios": {
-      "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
-      "integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
+      "version": "1.6.2",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
+      "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
       "dev": true,
       "dependencies": {
         "follow-redirects": "^1.15.0",
diff --git a/packages/artifact/src/internal/shared/artifact-twirp-client.ts b/packages/artifact/src/internal/shared/artifact-twirp-client.ts
index 444e3fac..09355808 100644
--- a/packages/artifact/src/internal/shared/artifact-twirp-client.ts
+++ b/packages/artifact/src/internal/shared/artifact-twirp-client.ts
@@ -52,8 +52,8 @@ class ArtifactHttpClient implements Rpc {
     contentType: 'application/json' | 'application/protobuf',
     data: object | Uint8Array
   ): Promise<object> {
-    const url = `${this.baseUrl}/twirp/${service}/${method}`
-    debug(`Requesting ${url}`)
+    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
+    debug(`Requesting: ${url}`)
     const headers = {
       'Content-Type': contentType
     }
diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index 8d8a1668..a5631bfc 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -1,3 +1,5 @@
+import os from 'os'
+
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
 export function getUploadChunkSize(): number {
@@ -17,7 +19,8 @@ export function getResultsServiceUrl(): string {
   if (!resultsUrl) {
     throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable')
   }
-  return resultsUrl
+
+  return new URL(resultsUrl).origin
 }
 
 export function isGhes(): boolean {
@@ -34,3 +37,17 @@ export function getGitHubWorkspaceDir(): string {
   }
   return ghWorkspaceDir
 }
+
+// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
+// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
+// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+export function getConcurrency(): number {
+  const numCPUs = os.cpus().length
+
+  if (numCPUs <= 4) {
+    return 32
+  }
+
+  const concurrency = 16 * numCPUs
+  return concurrency > 300 ? 300 : concurrency
+}
diff --git a/packages/artifact/src/internal/shared/interfaces.ts b/packages/artifact/src/internal/shared/interfaces.ts
index 7e3e862f..b160d1be 100644
--- a/packages/artifact/src/internal/shared/interfaces.ts
+++ b/packages/artifact/src/internal/shared/interfaces.ts
@@ -38,6 +38,17 @@ export interface UploadOptions {
    * input of 0 assumes default retention setting.
    */
   retentionDays?: number
+  /**
+   * The level of compression for Zlib to be applied to the artifact archive.
+   * The value can range from 0 to 9:
+   * - 0: No compression
+   * - 1: Best speed
+   * - 6: Default compression (same as GNU Gzip)
+   * - 9: Best compression
+   * Higher levels will result in better compression, but will take longer to complete.
+   * For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
+   */
+  compressionLevel?: number
 }
 
 /*****************************************************************************
diff --git a/packages/artifact/src/internal/upload/blob-upload.ts b/packages/artifact/src/internal/upload/blob-upload.ts
index 2bed1f39..cb7a11b7 100644
--- a/packages/artifact/src/internal/upload/blob-upload.ts
+++ b/packages/artifact/src/internal/upload/blob-upload.ts
@@ -1,7 +1,7 @@
 import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import {TransferProgressEvent} from '@azure/core-http'
 import {ZipUploadStream} from './zip'
-import {getUploadChunkSize} from '../shared/config'
+import {getUploadChunkSize, getConcurrency} from '../shared/config'
 import * as core from '@actions/core'
 import * as crypto from 'crypto'
 import * as stream from 'stream'
@@ -29,13 +29,13 @@ export async function uploadZipToBlobStorage(
 ): Promise<BlobUploadResponse> {
   let uploadByteCount = 0
 
-  const maxBuffers = 5
+  const maxConcurrency = getConcurrency()
   const bufferSize = getUploadChunkSize()
   const blobClient = new BlobClient(authenticatedUploadURL)
   const blockBlobClient = blobClient.getBlockBlobClient()
 
   core.debug(
-    `Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`
+    `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
   )
 
   const uploadCallback = (progress: TransferProgressEvent): void => {
@@ -61,7 +61,7 @@
   await blockBlobClient.uploadStream(
     uploadStream,
     bufferSize,
-    maxBuffers,
+    maxConcurrency,
     options
   )
 
diff --git a/packages/artifact/src/internal/upload/upload-artifact.ts b/packages/artifact/src/internal/upload/upload-artifact.ts
index ced05568..bcc91ec8 100644
--- a/packages/artifact/src/internal/upload/upload-artifact.ts
+++ b/packages/artifact/src/internal/upload/upload-artifact.ts
@@ -37,7 +37,10 @@ export async function uploadArtifact(
     }
   }
 
-  const zipUploadStream = await createZipUploadStream(zipSpecification)
+  const zipUploadStream = await createZipUploadStream(
+    zipSpecification,
+    options?.compressionLevel
+  )
 
   // get the IDs needed for the artifact creation
   const backendIds = getBackendIdsFromToken()
diff --git a/packages/artifact/src/internal/upload/zip.ts b/packages/artifact/src/internal/upload/zip.ts
index 81a12343..5a923119 100644
--- a/packages/artifact/src/internal/upload/zip.ts
+++ b/packages/artifact/src/internal/upload/zip.ts
@@ -5,6 +5,8 @@ import {createReadStream} from 'fs'
 import {UploadZipSpecification} from './upload-zip-specification'
 import {getUploadChunkSize} from '../shared/config'
 
+export const DEFAULT_COMPRESSION_LEVEL = 6
+
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
 export class ZipUploadStream extends stream.Transform {
@@ -21,14 +23,16 @@ export async function createZipUploadStream(
-  uploadSpecification: UploadZipSpecification[]
+  uploadSpecification: UploadZipSpecification[],
+  compressionLevel: number = DEFAULT_COMPRESSION_LEVEL
 ): Promise<ZipUploadStream> {
+  core.debug(
+    `Creating Artifact archive with compressionLevel: ${compressionLevel}`
+  )
+
   const zip = archiver.create('zip', {
-    zlib: {level: 9} // Sets the compression level.
-    // Available options are 0-9
-    // 0 => no compression
-    // 1 => fastest with low compression
-    // 9 => highest compression ratio but the slowest
+    highWaterMark: getUploadChunkSize(),
+    zlib: {level: compressionLevel}
   })
 
   // register callbacks for various events during the zip lifecycle
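
Notes on the changes above:

A few worked values for the `getConcurrency()` heuristic: 4 CPUs or fewer hits the low-CPU floor of 32; 8 CPUs gives 16 × 8 = 128; 16 CPUs gives 256; and from 19 CPUs upward (16 × 19 = 304) the result is clamped to the 300 cap, matching the azcopy defaults cited in the comment.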
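
The two URL changes (`new URL(resultsUrl).origin` in `config.ts`, and `new URL(path, base).href` in the Twirp client) both normalize endpoint values rather than concatenating strings. A minimal sketch of the behavior with made-up hostnames, using only Node's built-in WHATWG `URL`:

```ts
// Hypothetical endpoint value for illustration only.
const resultsUrl = 'https://results.example.com/some/path/'

// getResultsServiceUrl() now returns only the origin, dropping any
// path or trailing slash baked into ACTIONS_RESULTS_URL.
console.log(new URL(resultsUrl).origin)
// -> https://results.example.com

// The Twirp client resolves its route against that base, so the
// joined URL cannot end up with doubled or missing slashes.
console.log(new URL(`/twirp/ExampleService/ExampleMethod`, resultsUrl).href)
// -> https://results.example.com/twirp/ExampleService/ExampleMethod
```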
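
And a consumer-side sketch of the new `compressionLevel` option. The client entry point shown here is an assumption about the package's public surface; the diff itself only covers the internals that thread the option through to archiver:

```ts
// NOTE: the client class name is assumed; check the package's actual exports.
import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

// compressionLevel: 0 disables zlib compression entirely, which the new
// interface docs recommend for large, poorly-compressible files
// (already-compressed archives, media, binaries).
await artifact.uploadArtifact(
  'build-output',       // artifact name
  ['dist/app.tar.gz'],  // files to include
  'dist',               // root directory
  {retentionDays: 7, compressionLevel: 0}
)
```

Leaving the option unset falls back to `DEFAULT_COMPRESSION_LEVEL = 6`, the same trade-off as zlib's (and GNU gzip's) default.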