
Updates to logging for artifact uploads (#949)

* More detailed logs during artifact upload

* extra logging

* Updates to artifact logging + clarifications around upload size

* Fix linting errors

* Update packages/artifact/src/internal/artifact-client.ts

Co-authored-by: campersau <buchholz.bastian@googlemail.com>

Konrad Pabjan 2021-11-30 12:53:24 -05:00 committed by GitHub
parent e19e4261da
commit 4df5abb3ee
5 changed files with 73 additions and 8 deletions

packages/artifact/src/internal/artifact-client.ts

@@ -72,6 +72,10 @@ export class DefaultArtifactClient implements ArtifactClient {
     rootDirectory: string,
     options?: UploadOptions | undefined
   ): Promise<UploadResponse> {
+    core.info(
+      `Starting artifact upload
+For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`
+    )
     checkArtifactName(name)

     // Get specification for the files being uploaded
@@ -103,7 +107,11 @@ export class DefaultArtifactClient implements ArtifactClient {
           'No URL provided by the Artifact Service to upload an artifact to'
         )
       }
+
       core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)
+      core.info(
+        `Container for artifact "${name}" successfully created. Starting upload of file(s)`
+      )

       // Upload each of the files that were found concurrently
       const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
@@ -114,10 +122,27 @@ export class DefaultArtifactClient implements ArtifactClient {

       // Update the size of the artifact to indicate we are done uploading
       // The uncompressed size is used for display when downloading a zip of the artifact from the UI
+      core.info(
+        `File upload process has finished. Finalizing the artifact upload`
+      )
       await uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name)

+      if (uploadResult.failedItems.length > 0) {
+        core.info(
+          `Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`
+        )
+      } else {
+        core.info(
+          `Artifact has been finalized. All files have been successfully uploaded!`
+        )
+      }
+
       core.info(
-        `Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
+        `
+The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
+The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
+
+Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`
       )

       uploadResponse.artifactItems = uploadSpecification.map(
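For context, a minimal sketch of the consumer-side call that would exercise the new log lines above, assuming the standard @actions/artifact entry point; the artifact name and file paths are hypothetical.

import * as artifact from '@actions/artifact'

async function run(): Promise<void> {
  const client = artifact.create()
  // Hypothetical inputs; any files under the root directory behave the same
  const files = ['dist/output.txt', 'dist/report.json']
  const rootDirectory = 'dist'

  // Triggers the new "Starting artifact upload" info line, the container
  // creation message, and the raw-size vs upload-size summary added above
  const response = await client.uploadArtifact('my-artifact', files, rootDirectory)
  console.log(`items uploaded: ${response.artifactItems.length}`)
}

run()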

packages/artifact/src/internal/contracts.ts

@@ -29,8 +29,20 @@ export interface PatchArtifactSizeSuccessResponse {
 }

 export interface UploadResults {
+  /**
+   * The size in bytes of data that was transferred during the upload process to the actions backend service. This takes into account possible
+   * gzip compression to reduce the amount of data that needs to be transferred
+   */
   uploadSize: number
+
+  /**
+   * The raw size of the files that were specified for upload
+   */
   totalSize: number
+
+  /**
+   * An array of files that failed to upload
+   */
   failedItems: string[]
 }
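To make the two size fields concrete, here is a hypothetical UploadResults value for a run where gzip reduced the transfer and one file failed; all values are invented for illustration, and the './contracts' import path assumes this interface lives in contracts.ts.

import {UploadResults} from './contracts'

const example: UploadResults = {
  uploadSize: 2_457_600, // bytes actually transferred, after gzip compression
  totalSize: 10_485_760, // raw bytes of the files specified for upload
  failedItems: ['logs/flaky.txt'] // files that failed after all retry attempts
}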

packages/artifact/src/internal/upload-http-client.ts

@@ -235,19 +235,28 @@ export class UploadHttpClient {
     // for creating a new GZip file, an in-memory buffer is used for compression
     // with named pipes the file size is reported as zero in that case don't read the file in memory
     if (!isFIFO && totalFileSize < 65536) {
+      core.debug(
+        `${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`
+      )
       const buffer = await createGZipFileInBuffer(parameters.file)

-      //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
+      // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
       // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
       let openUploadStream: () => NodeJS.ReadableStream

       if (totalFileSize < buffer.byteLength) {
         // compression did not help with reducing the size, use a readable stream from the original file for upload
+        core.debug(
+          `The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`
+        )
         openUploadStream = () => fs.createReadStream(parameters.file)
         isGzip = false
         uploadFileSize = totalFileSize
       } else {
         // create a readable stream using a PassThrough stream that is both readable and writable
+        core.debug(
+          `A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`
+        )
         openUploadStream = () => {
           const passThrough = new stream.PassThrough()
           passThrough.end(buffer)
@@ -283,6 +292,9 @@ export class UploadHttpClient {
       // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
       // npm tmp-promise package and this file gets used to create a GZipped file
       const tempFile = await tmp.file()
+      core.debug(
+        `${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`
+      )

       // create a GZip file of the original file being uploaded, the original file should not be modified in any way
       uploadFileSize = await createGZipFileOnDisk(
@@ -295,9 +307,16 @@ export class UploadHttpClient {
       // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
       // for named pipes totalFileSize is zero, this assumes compression did help
       if (!isFIFO && totalFileSize < uploadFileSize) {
+        core.debug(
+          `The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`
+        )
         uploadFileSize = totalFileSize
         uploadFilePath = parameters.file
         isGzip = false
+      } else {
+        core.debug(
+          `The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`
+        )
       }

       let abortFileUpload = false
@@ -355,6 +374,7 @@ export class UploadHttpClient {
       // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
       // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
+      core.debug(`deleting temporary gzip file ${tempFile.path}`)
       await tempFile.cleanup()

       return {
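The branching that these debug lines trace can be summarized as a standalone sketch. This is a simplification, not the client's actual implementation: decideUpload is an invented helper name, and the real code also handles named pipes, on-disk temp files via tmp-promise, chunking, and retries.

import * as fs from 'fs'
import * as zlib from 'zlib'
import {promisify} from 'util'

const gzipAsync = promisify(zlib.gzip)

// Invented helper: gzip is kept only when it actually shrinks the payload.
// Files under 64 KiB are compressed into an in-memory buffer; the real
// client routes larger files through a temporary gzip file on disk.
async function decideUpload(
  filePath: string
): Promise<{isGzip: boolean; uploadFileSize: number}> {
  const totalFileSize = fs.statSync(filePath).size
  if (totalFileSize < 65536) {
    const buffer = await gzipAsync(fs.readFileSync(filePath))
    if (totalFileSize < buffer.byteLength) {
      // compression did not help; upload the original file as-is
      return {isGzip: false, uploadFileSize: totalFileSize}
    }
    return {isGzip: true, uploadFileSize: buffer.byteLength}
  }
  // >= 64 KiB: the real client gzips to a temp file and compares sizes there
  return {isGzip: true, uploadFileSize: totalFileSize}
}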

packages/artifact/src/internal/upload-specification.ts

@@ -1,7 +1,7 @@
 import * as fs from 'fs'
 import {debug} from '@actions/core'
 import {join, normalize, resolve} from 'path'
-import {checkArtifactName, checkArtifactFilePath} from './utils'
+import {checkArtifactFilePath} from './utils'

 export interface UploadSpecification {
   absoluteFilePath: string
@@ -19,8 +19,7 @@ export function getUploadSpecification(
   rootDirectory: string,
   artifactFiles: string[]
 ): UploadSpecification[] {
-  checkArtifactName(artifactName)
-
+  // artifact name was checked earlier on, no need to check again
   const specifications: UploadSpecification[] = []

   if (!fs.existsSync(rootDirectory)) {

packages/artifact/src/internal/utils.ts

@@ -30,7 +30,7 @@ export function getExponentialRetryTimeInMilliseconds(
   const maxTime = minTime * getRetryMultiplier()

   // returns a random number between the minTime (inclusive) and the maxTime (exclusive)
-  return Math.random() * (maxTime - minTime) + minTime
+  return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
 }

 /**
@@ -263,10 +263,16 @@ export function checkArtifactName(name: string): void {
   for (const invalidChar of invalidArtifactNameCharacters) {
     if (name.includes(invalidChar)) {
       throw new Error(
-        `Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`
+        `Artifact name is not valid: ${name}. Contains the following character: "${invalidChar}".
+
+Invalid characters include: ${invalidArtifactNameCharacters.toString()}.
+
+These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`
       )
     }
   }
+
+  info(`Artifact name is valid!`)
 }

 /**
@@ -280,7 +286,10 @@ export function checkArtifactFilePath(path: string): void {
   for (const invalidChar of invalidArtifactFilePathCharacters) {
     if (path.includes(invalidChar)) {
       throw new Error(
-        `Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`
+        `Artifact path is not valid: ${path}. Contains the following character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.
+
+The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
+`
       )
     }
   }
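A quick illustration of the validator behavior after this change; the names are invented, and it assumes ':' is among the characters in invalidArtifactNameCharacters, as it is in the published toolkit.

import {checkArtifactName} from './utils'

// Invalid name: throws with the expanded multi-line message that now
// explains the NTFS-related rationale
try {
  checkArtifactName('my:artifact')
} catch (err) {
  console.error((err as Error).message)
}

// Valid name: passes and now also logs "Artifact name is valid!"
checkArtifactName('my-artifact')

Separately, the Math.trunc change above means getExponentialRetryTimeInMilliseconds now returns a whole number of milliseconds, so the jittered backoff value is an integer rather than a fractional duration.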