diff --git a/packages/artifact/src/internal/artifact-client.ts b/packages/artifact/src/internal/artifact-client.ts
index bdf88493..730e19d5 100644
--- a/packages/artifact/src/internal/artifact-client.ts
+++ b/packages/artifact/src/internal/artifact-client.ts
@@ -72,6 +72,10 @@ export class DefaultArtifactClient implements ArtifactClient {
     rootDirectory: string,
     options?: UploadOptions | undefined
   ): Promise<UploadResponse> {
+    core.info(
+      `Starting artifact upload
+For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`
+    )
     checkArtifactName(name)
 
     // Get specification for the files being uploaded
@@ -103,7 +107,11 @@ export class DefaultArtifactClient implements ArtifactClient {
         'No URL provided by the Artifact Service to upload an artifact to'
       )
     }
+    core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)
+    core.info(
+      `Container for artifact "${name}" successfully created. Starting upload of file(s)`
+    )
 
     // Upload each of the files that were found concurrently
     const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
       response.fileContainerResourceUrl,
@@ -114,10 +122,27 @@ export class DefaultArtifactClient implements ArtifactClient {
 
     // Update the size of the artifact to indicate we are done uploading
     // The uncompressed size is used for display when downloading a zip of the artifact from the UI
+    core.info(
+      `File upload process has finished. Finalizing the artifact upload`
+    )
     await uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name)
 
+    if (uploadResult.failedItems.length > 0) {
+      core.info(
+        `Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`
+      )
+    } else {
+      core.info(
+        `Artifact has been finalized. All files have been successfully uploaded!`
+      )
+    }
+
     core.info(
-      `Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
+      `
+The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
+The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
+
+Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`
     )
 
     uploadResponse.artifactItems = uploadSpecification.map(
diff --git a/packages/artifact/src/internal/contracts.ts b/packages/artifact/src/internal/contracts.ts
index 277813ca..55a391cf 100644
--- a/packages/artifact/src/internal/contracts.ts
+++ b/packages/artifact/src/internal/contracts.ts
@@ -29,8 +29,20 @@ export interface PatchArtifactSizeSuccessResponse {
 }
 
 export interface UploadResults {
+  /**
+   * The size in bytes of data that was transferred during the upload process to the actions backend service. This takes into account possible
+   * gzip compression to reduce the amount of data that needs to be transferred
+   */
   uploadSize: number
+
+  /**
+   * The raw size of the files that were specified for upload
+   */
   totalSize: number
+
+  /**
+   * An array of files that failed to upload
+   */
   failedItems: string[]
 }
 
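The two `UploadResults` fields documented above are what drive the new summary log in `artifact-client.ts`: `totalSize` is the raw byte count of the files selected for upload, while `uploadSize` is what actually crossed the wire after optional gzip compression. A minimal sketch of how a consumer could turn those fields into the same kind of summary, assuming only the interface as documented here (the `summarize` helper is hypothetical):

```typescript
import {UploadResults} from './contracts'

// Hypothetical helper, for illustration only
function summarize(result: UploadResults): string {
  // Guard against division by zero for empty uploads
  const percent =
    result.totalSize === 0
      ? 100
      : Math.round((result.uploadSize / result.totalSize) * 100)
  return `Transferred ${result.uploadSize} of ${result.totalSize} raw bytes (${percent}%); ${result.failedItems.length} file(s) failed`
}
```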
diff --git a/packages/artifact/src/internal/upload-http-client.ts b/packages/artifact/src/internal/upload-http-client.ts
index c9aa3650..bf3d2b33 100644
--- a/packages/artifact/src/internal/upload-http-client.ts
+++ b/packages/artifact/src/internal/upload-http-client.ts
@@ -235,19 +235,28 @@ export class UploadHttpClient {
     // for creating a new GZip file, an in-memory buffer is used for compression
     // with named pipes the file size is reported as zero in that case don't read the file in memory
     if (!isFIFO && totalFileSize < 65536) {
+      core.debug(
+        `${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`
+      )
       const buffer = await createGZipFileInBuffer(parameters.file)
 
-      //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
+      // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
       // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
       let openUploadStream: () => NodeJS.ReadableStream
 
       if (totalFileSize < buffer.byteLength) {
         // compression did not help with reducing the size, use a readable stream from the original file for upload
+        core.debug(
+          `The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`
+        )
         openUploadStream = () => fs.createReadStream(parameters.file)
         isGzip = false
         uploadFileSize = totalFileSize
       } else {
         // create a readable stream using a PassThrough stream that is both readable and writable
+        core.debug(
+          `A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`
+        )
         openUploadStream = () => {
           const passThrough = new stream.PassThrough()
           passThrough.end(buffer)
@@ -283,6 +292,9 @@ export class UploadHttpClient {
       // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
       // npm tmp-promise package and this file gets used to create a GZipped file
       const tempFile = await tmp.file()
+      core.debug(
+        `${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`
+      )
 
       // create a GZip file of the original file being uploaded, the original file should not be modified in any way
       uploadFileSize = await createGZipFileOnDisk(
@@ -295,9 +307,16 @@ export class UploadHttpClient {
       // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
       // for named pipes totalFileSize is zero, this assumes compression did help
       if (!isFIFO && totalFileSize < uploadFileSize) {
+        core.debug(
+          `The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`
+        )
        uploadFileSize = totalFileSize
         uploadFilePath = parameters.file
         isGzip = false
+      } else {
+        core.debug(
+          `The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`
+        )
       }
 
       let abortFileUpload = false
@@ -355,6 +374,7 @@ export class UploadHttpClient {
 
      // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
       // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
+      core.debug(`deleting temporary gzip file ${tempFile.path}`)
       await tempFile.cleanup()
 
       return {
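The logic instrumented above follows one rule in both the in-memory (<64k) and on-disk (>=64k) branches: the gzip output is only used when it is actually smaller than the original file. A self-contained sketch of that size comparison for the small-file case, using Node's built-in `zlib` rather than the package's own `createGZipFileInBuffer` helper:

```typescript
import * as fs from 'fs'
import * as zlib from 'zlib'

// Illustration only: mirror the in-memory branch for files under 64k
function pickUploadPayload(filePath: string): {data: Buffer; isGzip: boolean} {
  const raw = fs.readFileSync(filePath)
  const gzipped = zlib.gzipSync(raw)
  // If compression did not reduce the size, upload the original bytes as-is
  return gzipped.byteLength < raw.byteLength
    ? {data: gzipped, isGzip: true}
    : {data: raw, isGzip: false}
}
```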
diff --git a/packages/artifact/src/internal/upload-specification.ts b/packages/artifact/src/internal/upload-specification.ts
index e0815357..884a5967 100644
--- a/packages/artifact/src/internal/upload-specification.ts
+++ b/packages/artifact/src/internal/upload-specification.ts
@@ -1,7 +1,7 @@
 import * as fs from 'fs'
 import {debug} from '@actions/core'
 import {join, normalize, resolve} from 'path'
-import {checkArtifactName, checkArtifactFilePath} from './utils'
+import {checkArtifactFilePath} from './utils'
 
 export interface UploadSpecification {
   absoluteFilePath: string
@@ -19,8 +19,7 @@ export function getUploadSpecification(
   rootDirectory: string,
   artifactFiles: string[]
 ): UploadSpecification[] {
-  checkArtifactName(artifactName)
-
+  // artifact name was checked earlier on, no need to check again
   const specifications: UploadSpecification[] = []
 
   if (!fs.existsSync(rootDirectory)) {
diff --git a/packages/artifact/src/internal/utils.ts b/packages/artifact/src/internal/utils.ts
index 8b5ac580..8d91444f 100644
--- a/packages/artifact/src/internal/utils.ts
+++ b/packages/artifact/src/internal/utils.ts
@@ -30,7 +30,7 @@ export function getExponentialRetryTimeInMilliseconds(
   const maxTime = minTime * getRetryMultiplier()
 
   // returns a random number between the minTime (inclusive) and the maxTime (exclusive)
-  return Math.random() * (maxTime - minTime) + minTime
+  return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
 }
 
 /**
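The `Math.trunc` change above only affects presentation: the jittered backoff is now a whole number of milliseconds, so retry delays print cleanly in logs. A standalone sketch of the calculation; the base time and multiplier here are assumed placeholders, since the real values come from `getRetryMultiplier()` and the client's configuration:

```typescript
// Assumed placeholder values, for illustration only
const INITIAL_RETRY_MS = 3000
const RETRY_MULTIPLIER = 1.5

function getExponentialRetryMs(retryCount: number): number {
  const minTime = INITIAL_RETRY_MS * RETRY_MULTIPLIER ** retryCount
  const maxTime = minTime * RETRY_MULTIPLIER
  // Random delay in [minTime, maxTime), truncated to an integer millisecond value
  return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}
```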
@@ -263,10 +263,16 @@ export function checkArtifactName(name: string): void {
   for (const invalidChar of invalidArtifactNameCharacters) {
     if (name.includes(invalidChar)) {
       throw new Error(
-        `Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`
+        `Artifact name is not valid: ${name}. Contains the following character: "${invalidChar}".
+
+Invalid characters include: ${invalidArtifactNameCharacters.toString()}.
+
+These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`
       )
     }
   }
+
+  info(`Artifact name is valid!`)
 }
 
 /**
@@ -280,7 +286,10 @@ export function checkArtifactFilePath(path: string): void {
   for (const invalidChar of invalidArtifactFilePathCharacters) {
     if (path.includes(invalidChar)) {
       throw new Error(
-        `Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`
+        `Artifact path is not valid: ${path}. Contains the following character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.
+
+The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
+`
       )
     }
   }
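With the new messages in place, a failed name check now explains the file-system rationale instead of only listing characters. A short sketch of both outcomes, assuming the invalid-character list contains typical NTFS-problematic characters such as `:` (the authoritative list is `invalidArtifactNameCharacters` in `utils.ts`):

```typescript
import {checkArtifactName} from './utils'

// Illustration only: one passing and one failing validation
checkArtifactName('nightly-build-logs') // logs: Artifact name is valid!

try {
  checkArtifactName('logs:latest') // ":" is assumed to be in the invalid list
} catch (err) {
  // The message now names the offending character and explains the NTFS limitation
  console.error((err as Error).message)
}
```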