diff --git a/packages/artifact/__tests__/download.test.ts b/packages/artifact/__tests__/download.test.ts index 3da3cad4..40ab58cb 100644 --- a/packages/artifact/__tests__/download.test.ts +++ b/packages/artifact/__tests__/download.test.ts @@ -4,6 +4,8 @@ import * as io from '../../io/src/io' import * as net from 'net' import * as path from 'path' import * as configVariables from '../src/internal/config-variables' +import {promises as fs} from 'fs' +import {DownloadItem} from '../src/internal/download-specification' import {HttpClient, HttpClientResponse} from '@actions/http-client' import {DownloadHttpClient} from '../src/internal/download-http-client' import { @@ -11,7 +13,7 @@ import { QueryArtifactResponse } from '../src/internal/contracts' -const root = path.join(__dirname, '_temp', 'artifact-download') +const root = path.join(__dirname, '_temp', 'artifact-download-tests') jest.mock('../src/internal/config-variables') jest.mock('@actions/http-client') @@ -19,12 +21,16 @@ jest.mock('@actions/http-client') describe('Download Tests', () => { beforeAll(async () => { await io.rmRF(root) + await fs.mkdir(path.join(root), { + recursive: true + }) // mock all output so that there is less noise when running tests jest.spyOn(console, 'log').mockImplementation(() => {}) jest.spyOn(core, 'debug').mockImplementation(() => {}) jest.spyOn(core, 'info').mockImplementation(() => {}) jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) }) /** @@ -107,6 +113,56 @@ describe('Download Tests', () => { ) }) + it('Test downloading an individual artifact with gzip', async () => { + setupDownloadItemResponse(true, 200) + const downloadHttpClient = new DownloadHttpClient() + + const items: DownloadItem[] = [] + items.push({ + sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileA.txt`, + targetPath: path.join(root, 'FileA.txt') + }) + + await expect( + downloadHttpClient.downloadSingleArtifact(items) + ).resolves.not.toThrow() + }) + + it('Test downloading an individual artifact without gzip', async () => { + setupDownloadItemResponse(false, 200) + const downloadHttpClient = new DownloadHttpClient() + + const items: DownloadItem[] = [] + items.push({ + sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileB.txt`, + targetPath: path.join(root, 'FileB.txt') + }) + + await expect( + downloadHttpClient.downloadSingleArtifact(items) + ).resolves.not.toThrow() + }) + + it('Test retryable status codes during artifact download', async () => { + // The first http response should return a retryable status call while the subsequent call should return a 200 so + // the download should successfully finish + const retryableStatusCodes = [429, 502, 503, 504] + for (const statusCode of retryableStatusCodes) { + setupDownloadItemResponse(false, statusCode) + const downloadHttpClient = new DownloadHttpClient() + + const items: DownloadItem[] = [] + items.push({ + sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileC.txt`, + targetPath: path.join(root, 'FileC.txt') + }) + + await expect( + downloadHttpClient.downloadSingleArtifact(items) + ).resolves.not.toThrow() + } + }) + /** * Helper used to setup mocking for the HttpClient */ @@ -164,6 +220,60 @@ describe('Download Tests', () => { }) } + /** + * Setups up HTTP GET response for downloading items + * @param isGzip is the downloaded item gzip encoded + * 
@param firstHttpResponseCode the http response code that should be returned + */ + function setupDownloadItemResponse( + isGzip: boolean, + firstHttpResponseCode: number + ): void { + jest + .spyOn(DownloadHttpClient.prototype, 'pipeResponseToFile') + .mockImplementationOnce(async () => { + return new Promise(resolve => { + resolve() + }) + }) + + jest + .spyOn(HttpClient.prototype, 'get') + .mockImplementationOnce(async () => { + const mockMessage = new http.IncomingMessage(new net.Socket()) + mockMessage.statusCode = firstHttpResponseCode + if (isGzip) { + mockMessage.headers = { + 'content-type': 'gzip' + } + } + + return new Promise(resolve => { + resolve({ + message: mockMessage, + readBody: emptyMockReadBody + }) + }) + }) + .mockImplementationOnce(async () => { + // chained response, if the HTTP GET function gets called again, return a successful response + const mockMessage = new http.IncomingMessage(new net.Socket()) + mockMessage.statusCode = 200 + if (isGzip) { + mockMessage.headers = { + 'content-type': 'gzip' + } + } + + return new Promise(resolve => { + resolve({ + message: mockMessage, + readBody: emptyMockReadBody + }) + }) + }) + } + /** * Setups up HTTP GET response when querying for container items */ diff --git a/packages/artifact/__tests__/upload.test.ts b/packages/artifact/__tests__/upload.test.ts index 502c20ea..be1b3bf2 100644 --- a/packages/artifact/__tests__/upload.test.ts +++ b/packages/artifact/__tests__/upload.test.ts @@ -12,6 +12,7 @@ import { PatchArtifactSizeSuccessResponse } from '../src/internal/contracts' import {UploadSpecification} from '../src/internal/upload-specification' +import {getArtifactUrl} from '../src/internal/utils' const root = path.join(__dirname, '_temp', 'artifact-upload') const file1Path = path.join(root, 'file1.txt') @@ -36,6 +37,7 @@ describe('Upload Tests', () => { jest.spyOn(core, 'debug').mockImplementation(() => {}) jest.spyOn(core, 'info').mockImplementation(() => {}) jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) // setup mocking for calls that got through the HttpClient setupHttpClientMock() @@ -99,7 +101,7 @@ describe('Upload Tests', () => { uploadHttpClient.createArtifactInFileContainer(artifactName) ).rejects.toEqual( new Error( - 'Unable to create a container for the artifact invalid-artifact-name' + `Unable to create a container for the artifact invalid-artifact-name at ${getArtifactUrl()}` ) ) }) diff --git a/packages/artifact/__tests__/util.test.ts b/packages/artifact/__tests__/util.test.ts index c1fcca40..1ca963eb 100644 --- a/packages/artifact/__tests__/util.test.ts +++ b/packages/artifact/__tests__/util.test.ts @@ -4,7 +4,12 @@ import * as path from 'path' import * as utils from '../src/internal/utils' import * as core from '@actions/core' import {HttpCodes} from '@actions/http-client' -import {getRuntimeUrl, getWorkFlowRunId} from '../src/internal/config-variables' +import { + getRuntimeUrl, + getWorkFlowRunId, + getInitialRetryIntervalInMilliseconds, + getRetryMultiplier +} from '../src/internal/config-variables' jest.mock('../src/internal/config-variables') @@ -17,6 +22,30 @@ describe('Utils', () => { jest.spyOn(core, 'warning').mockImplementation(() => {}) }) + it('Check exponential retry range', () => { + // No retries should return the initial retry interval + const retryWaitTime0 = utils.getExponentialRetryTimeInMilliseconds(0) + expect(retryWaitTime0).toEqual(getInitialRetryIntervalInMilliseconds()) + + const testMinMaxRange = (retryCount: 
number): void => { + const retryWaitTime = utils.getExponentialRetryTimeInMilliseconds( + retryCount + ) + const minRange = + getInitialRetryIntervalInMilliseconds() * + getRetryMultiplier() * + retryCount + const maxRange = minRange * getRetryMultiplier() + + expect(retryWaitTime).toBeGreaterThanOrEqual(minRange) + expect(retryWaitTime).toBeLessThan(maxRange) + } + + for (let i = 1; i < 10; i++) { + testMinMaxRange(i) + } + }) + it('Check Artifact Name for any invalid characters', () => { const invalidNames = [ 'my\\artifact', @@ -88,13 +117,13 @@ describe('Utils', () => { ) }) - it('Test constructing headers with all optional parameters', () => { - const type = 'application/json' + it('Test constructing upload headers with all optional parameters', () => { + const contentType = 'application/octet-stream' const size = 24 const uncompressedLength = 100 const range = 'bytes 0-199/200' - const options = utils.getRequestOptions( - type, + const options = utils.getUploadRequestOptions( + contentType, true, true, uncompressedLength, @@ -103,9 +132,9 @@ describe('Utils', () => { ) expect(Object.keys(options).length).toEqual(8) expect(options['Accept']).toEqual( - `${type};api-version=${utils.getApiVersion()}` + `application/json;api-version=${utils.getApiVersion()}` ) - expect(options['Content-Type']).toEqual(type) + expect(options['Content-Type']).toEqual(contentType) expect(options['Connection']).toEqual('Keep-Alive') expect(options['Keep-Alive']).toEqual('10') expect(options['Content-Encoding']).toEqual('gzip') @@ -114,9 +143,33 @@ describe('Utils', () => { expect(options['Content-Range']).toEqual(range) }) - it('Test constructing headers with only required parameter', () => { - const options = utils.getRequestOptions() - expect(Object.keys(options).length).toEqual(1) + it('Test constructing upload headers with only required parameter', () => { + const options = utils.getUploadRequestOptions('application/octet-stream') + expect(Object.keys(options).length).toEqual(2) + expect(options['Accept']).toEqual( + `application/json;api-version=${utils.getApiVersion()}` + ) + expect(options['Content-Type']).toEqual('application/octet-stream') + }) + + it('Test constructing download headers with all optional parameters', () => { + const contentType = 'application/json' + const options = utils.getDownloadRequestOptions(contentType, true, true) + expect(Object.keys(options).length).toEqual(5) + expect(options['Content-Type']).toEqual(contentType) + expect(options['Connection']).toEqual('Keep-Alive') + expect(options['Keep-Alive']).toEqual('10') + expect(options['Accept-Encoding']).toEqual('gzip') + expect(options['Accept']).toEqual( + `application/octet-stream;api-version=${utils.getApiVersion()}` + ) + }) + + it('Test constructing download headers with only required parameter', () => { + const options = utils.getDownloadRequestOptions('application/octet-stream') + expect(Object.keys(options).length).toEqual(2) + expect(options['Content-Type']).toEqual('application/octet-stream') + // check for default accept type expect(options['Accept']).toEqual( `application/json;api-version=${utils.getApiVersion()}` ) @@ -137,11 +190,23 @@ describe('Utils', () => { true ) expect(utils.isRetryableStatusCode(HttpCodes.GatewayTimeout)).toEqual(true) + expect(utils.isRetryableStatusCode(429)).toEqual(true) expect(utils.isRetryableStatusCode(HttpCodes.OK)).toEqual(false) expect(utils.isRetryableStatusCode(HttpCodes.NotFound)).toEqual(false) expect(utils.isRetryableStatusCode(HttpCodes.Forbidden)).toEqual(false) }) + 
it('Test Throttled Status Code', () => { + expect(utils.isThrottledStatusCode(429)).toEqual(true) + expect(utils.isThrottledStatusCode(HttpCodes.InternalServerError)).toEqual( + false + ) + expect(utils.isThrottledStatusCode(HttpCodes.BadGateway)).toEqual(false) + expect(utils.isThrottledStatusCode(HttpCodes.ServiceUnavailable)).toEqual( + false + ) + }) + it('Test Creating Artifact Directories', async () => { const root = path.join(__dirname, '_temp', 'artifact-download') // remove directory before starting diff --git a/packages/artifact/docs/implementation-details.md b/packages/artifact/docs/implementation-details.md index d2f84646..3bf09e5c 100644 --- a/packages/artifact/docs/implementation-details.md +++ b/packages/artifact/docs/implementation-details.md @@ -6,9 +6,13 @@ Warning: Implementation details may change at any time without notice. This is m ![image](https://user-images.githubusercontent.com/16109154/77190819-38685d80-6ada-11ea-8281-4703ff8cc025.png) +## Retry Logic when downloading an individual file + +![image](https://user-images.githubusercontent.com/16109154/78555461-5be71400-780d-11ea-9abd-b05b77a95a3f.png) + ## Proxy support -This package uses the `@actions/http-client` NPM package internally which supports proxied requests out of the box. +This package uses the `@actions/http-client` NPM package internally which supports proxied requests out of the box. ## HttpManager diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json index 51e30cd1..9953a518 100644 --- a/packages/artifact/package-lock.json +++ b/packages/artifact/package-lock.json @@ -5,9 +5,9 @@ "requires": true, "dependencies": { "@actions/http-client": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz", - "integrity": "sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.7.tgz", + "integrity": "sha512-PY3ys/XH5WMekkHyZhYSa/scYvlE5T/TV/T++vABHuY5ZRgtiBZkn2L2tV5Pv/xDCl59lSZb9WwRuWExDyAsSg==", "requires": { "tunnel": "0.0.6" } diff --git a/packages/artifact/package.json b/packages/artifact/package.json index 1eed412c..a940a88c 100644 --- a/packages/artifact/package.json +++ b/packages/artifact/package.json @@ -37,7 +37,7 @@ }, "dependencies": { "@actions/core": "^1.2.1", - "@actions/http-client": "^1.0.6", + "@actions/http-client": "^1.0.7", "@types/tmp": "^0.1.0", "tmp": "^0.1.0", "tmp-promise": "^2.0.2" diff --git a/packages/artifact/src/internal/__mocks__/config-variables.ts b/packages/artifact/src/internal/__mocks__/config-variables.ts index 46efafdf..af7f7636 100644 --- a/packages/artifact/src/internal/__mocks__/config-variables.ts +++ b/packages/artifact/src/internal/__mocks__/config-variables.ts @@ -13,12 +13,16 @@ export function getUploadChunkSize(): number { return 4 * 1024 * 1024 // 4 MB Chunks } -export function getUploadRetryCount(): number { - return 1 +export function getRetryLimit(): number { + return 2 } -export function getRetryWaitTimeInMilliseconds(): number { - return 1 +export function getRetryMultiplier(): number { + return 1.5 +} + +export function getInitialRetryIntervalInMilliseconds(): number { + return 10 } export function getDownloadFileConcurrency(): number { diff --git a/packages/artifact/src/internal/artifact-client.ts b/packages/artifact/src/internal/artifact-client.ts index 0f90383d..afbf9e8a 100644 --- 
a/packages/artifact/src/internal/artifact-client.ts +++ b/packages/artifact/src/internal/artifact-client.ts @@ -173,6 +173,7 @@ export class DefaultArtifactClient implements ArtifactClient { await createDirectoriesForArtifact( downloadSpecification.directoryStructure ) + core.info('Directory structure has been setup for the artifact') await downloadHttpClient.downloadSingleArtifact( downloadSpecification.filesToDownload ) diff --git a/packages/artifact/src/internal/config-variables.ts b/packages/artifact/src/internal/config-variables.ts index baffffcd..5cdfd2d9 100644 --- a/packages/artifact/src/internal/config-variables.ts +++ b/packages/artifact/src/internal/config-variables.ts @@ -1,19 +1,31 @@ +// The number of concurrent uploads that happens at the same time export function getUploadFileConcurrency(): number { return 2 } +// When uploading large files that can't be uploaded with a single http call, this controls +// the chunk size that is used during upload export function getUploadChunkSize(): number { return 4 * 1024 * 1024 // 4 MB Chunks } -export function getUploadRetryCount(): number { - return 3 +// The maximum number of retries that can be attempted before an upload or download fails +export function getRetryLimit(): number { + return 5 } -export function getRetryWaitTimeInMilliseconds(): number { - return 10000 +// With exponential backoff, the larger the retry count, the larger the wait time before another attempt +// The retry multiplier controls by how much the backOff time increases depending on the number of retries +export function getRetryMultiplier(): number { + return 1.5 } +// The initial wait time if an upload or download fails and a retry is being attempted for the first time +export function getInitialRetryIntervalInMilliseconds(): number { + return 3000 +} + +// The number of concurrent downloads that happens at the same time export function getDownloadFileConcurrency(): number { return 2 } diff --git a/packages/artifact/src/internal/download-http-client.ts b/packages/artifact/src/internal/download-http-client.ts index dadfe5af..8d139806 100644 --- a/packages/artifact/src/internal/download-http-client.ts +++ b/packages/artifact/src/internal/download-http-client.ts @@ -1,30 +1,35 @@ import * as fs from 'fs' +import * as core from '@actions/core' import * as zlib from 'zlib' import { getArtifactUrl, - getRequestOptions, + getDownloadRequestOptions, isSuccessStatusCode, isRetryableStatusCode, - createHttpClient + isThrottledStatusCode, + getExponentialRetryTimeInMilliseconds, + tryGetRetryAfterValueTimeInMilliseconds, + displayHttpDiagnostics } from './utils' import {URL} from 'url' +import {StatusReporter} from './status-reporter' +import {performance} from 'perf_hooks' import {ListArtifactsResponse, QueryArtifactResponse} from './contracts' import {IHttpClientResponse} from '@actions/http-client/interfaces' import {HttpManager} from './http-manager' import {DownloadItem} from './download-specification' -import { - getDownloadFileConcurrency, - getRetryWaitTimeInMilliseconds -} from './config-variables' -import {warning} from '@actions/core' +import {getDownloadFileConcurrency, getRetryLimit} from './config-variables' import {IncomingHttpHeaders} from 'http' export class DownloadHttpClient { - // http manager is used for concurrent connection when downloading mulitple files at once + // http manager is used for concurrent connections when downloading multiple files at once private downloadHttpManager: HttpManager + private statusReporter: StatusReporter 
constructor() { this.downloadHttpManager = new HttpManager(getDownloadFileConcurrency()) + // downloads are usually significantly faster than uploads so display status information every second + this.statusReporter = new StatusReporter(1000) } /** @@ -32,18 +37,20 @@ export class DownloadHttpClient { */ async listArtifacts(): Promise { const artifactUrl = getArtifactUrl() - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediatly - const client = this.downloadHttpManager.getClient(0) - const requestOptions = getRequestOptions('application/json') - const rawResponse = await client.get(artifactUrl, requestOptions) - const body: string = await rawResponse.readBody() - if (isSuccessStatusCode(rawResponse.message.statusCode) && body) { + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.downloadHttpManager.getClient(0) + const requestOptions = getDownloadRequestOptions('application/json') + const response = await client.get(artifactUrl, requestOptions) + const body: string = await response.readBody() + + if (isSuccessStatusCode(response.message.statusCode) && body) { return JSON.parse(body) } - // eslint-disable-next-line no-console - console.log(rawResponse) - throw new Error(`Unable to list artifacts for the run`) + displayHttpDiagnostics(response) + throw new Error( + `Unable to list artifacts for the run. Resource Url ${artifactUrl}` + ) } /** @@ -59,18 +66,16 @@ export class DownloadHttpClient { const resourceUrl = new URL(containerUrl) resourceUrl.searchParams.append('itemPath', artifactName) - // no concurrent calls so a single httpClient without the http-manager is sufficient - const client = createHttpClient() + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.downloadHttpManager.getClient(0) + const requestOptions = getDownloadRequestOptions('application/json') + const response = await client.get(resourceUrl.toString(), requestOptions) + const body: string = await response.readBody() - // no keep-alive header, client disposal is not necessary - const requestOptions = getRequestOptions('application/json') - const rawResponse = await client.get(resourceUrl.toString(), requestOptions) - const body: string = await rawResponse.readBody() - if (isSuccessStatusCode(rawResponse.message.statusCode) && body) { + if (isSuccessStatusCode(response.message.statusCode) && body) { return JSON.parse(body) } - // eslint-disable-next-line no-console - console.log(rawResponse) + displayHttpDiagnostics(response) throw new Error(`Unable to get ContainersItems from ${resourceUrl}`) } @@ -81,24 +86,53 @@ export class DownloadHttpClient { async downloadSingleArtifact(downloadItems: DownloadItem[]): Promise { const DOWNLOAD_CONCURRENCY = getDownloadFileConcurrency() // limit the number of files downloaded at a single time + core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`) const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()] + let currentFile = 0 let downloadedFiles = 0 + + core.info( + `Total number of files that will be downloaded: ${downloadItems.length}` + ) + + this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length) + this.statusReporter.start() + await Promise.all( parallelDownloads.map(async index => { - while (downloadedFiles < downloadItems.length) { - const currentFileToDownload = downloadItems[downloadedFiles] - 
downloadedFiles += 1 + while (currentFile < downloadItems.length) { + const currentFileToDownload = downloadItems[currentFile] + currentFile += 1 + + const startTime = performance.now() await this.downloadIndividualFile( index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath ) + + if (core.isDebug()) { + core.debug( + `File: ${++downloadedFiles}/${downloadItems.length}. ${ + currentFileToDownload.targetPath + } took ${(performance.now() - startTime).toFixed( + 3 + )} milliseconds to finish downloading` + ) + } + + this.statusReporter.incrementProcessedCount() } }) ) - - // done downloading, safety dispose all connections - this.downloadHttpManager.disposeAndReplaceAllClients() + .catch(error => { + throw new Error(`Unable to download the artifact: ${error}`) + }) + .finally(() => { + this.statusReporter.stop() + // safety dispose all connections + this.downloadHttpManager.disposeAndReplaceAllClients() + }) } /** @@ -112,10 +146,20 @@ export class DownloadHttpClient { artifactLocation: string, downloadPath: string ): Promise { - const stream = fs.createWriteStream(downloadPath) - const client = this.downloadHttpManager.getClient(httpClientIndex) - const requestOptions = getRequestOptions('application/octet-stream', true) - const response = await client.get(artifactLocation, requestOptions) + let retryCount = 0 + const retryLimit = getRetryLimit() + const destinationStream = fs.createWriteStream(downloadPath) + const requestOptions = getDownloadRequestOptions( + 'application/json', + true, + true + ) + + // a single GET request is used to download a file + const makeDownloadRequest = async (): Promise => { + const client = this.downloadHttpManager.getClient(httpClientIndex) + return await client.get(artifactLocation, requestOptions) + } // check the response headers to determine if the file was compressed using gzip const isGzip = (headers: IncomingHttpHeaders): boolean => { @@ -124,66 +168,126 @@ export class DownloadHttpClient { ) } - if (isSuccessStatusCode(response.message.statusCode)) { - await this.pipeResponseToStream( - response, - stream, - isGzip(response.message.headers) - ) - } else if (isRetryableStatusCode(response.message.statusCode)) { - warning( - `Received http ${response.message.statusCode} during file download, will retry ${artifactLocation} after 10 seconds` - ) - // if an error is encountered, dispose of the http connection, and create a new one - this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex) - await new Promise(resolve => - setTimeout(resolve, getRetryWaitTimeInMilliseconds()) - ) - const retryResponse = await client.get(artifactLocation) - if (isSuccessStatusCode(retryResponse.message.statusCode)) { - await this.pipeResponseToStream( - response, - stream, - isGzip(response.message.headers) + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided, + // it will be used + const backOff = async (retryAfterValue?: number): Promise => { + retryCount++ + if (retryCount > retryLimit) { + return Promise.reject( + new Error( + `Retry limit has been reached. 
Unable to download ${artifactLocation}` + ) ) } else { - // eslint-disable-next-line no-console - console.log(retryResponse) - throw new Error(`Unable to download ${artifactLocation}`) + this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex) + if (retryAfterValue) { + // Back off by waiting the specified time denoted by the retry-after header + core.info( + `Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download` + ) + await new Promise(resolve => setTimeout(resolve, retryAfterValue)) + } else { + // Back off using an exponential value that depends on the retry count + const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount) + core.info( + `Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download` + ) + await new Promise(resolve => setTimeout(resolve, backoffTime)) + } + core.info( + `Finished backoff for retry #${retryCount}, continuing with download` + ) + } + } + + // keep trying to download a file until a retry limit has been reached + while (retryCount <= retryLimit) { + let response: IHttpClientResponse + try { + response = await makeDownloadRequest() + } catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the download + core.info('An error occurred while attempting to download a file') + // eslint-disable-next-line no-console + console.log(error) + + // increment the retryCount and use exponential backoff to wait before making the next request + await backOff() + continue + } + + if (isSuccessStatusCode(response.message.statusCode)) { + // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string + // which can cause some gzip encoded data to be lost + // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents + return this.pipeResponseToFile( + response, + destinationStream, + isGzip(response.message.headers) + ) + } else if (isRetryableStatusCode(response.message.statusCode)) { + core.info( + `A ${response.message.statusCode} response code has been received while attempting to download an artifact` + ) + // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff + isThrottledStatusCode(response.message.statusCode) + ? 
await backOff( + tryGetRetryAfterValueTimeInMilliseconds(response.message.headers) + ) + : await backOff() + } else { + // Some unexpected response code, fail immediately and stop the download + displayHttpDiagnostics(response) + return Promise.reject( + new Error( + `Unexpected http ${response.message.statusCode} during download for ${artifactLocation}` + ) + ) } - } else { - // eslint-disable-next-line no-console - console.log(response) - throw new Error(`Unable to download ${artifactLocation}`) } } /** - * Pipes the response from downloading an individual file to the appropriate stream - * @param response the http response recieved when downloading a file - * @param stream the stream where the file should be written to - * @param isGzip does the response need to be be uncompressed + * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary + * @param response the http response received when downloading a file + * @param destinationStream the stream where the file should be written to + * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it */ - private async pipeResponseToStream( + async pipeResponseToFile( response: IHttpClientResponse, - stream: NodeJS.WritableStream, + destinationStream: fs.WriteStream, isGzip: boolean ): Promise { - return new Promise(resolve => { + await new Promise((resolve, reject) => { if (isGzip) { - // pipe the response into gunzip to decompress const gunzip = zlib.createGunzip() response.message .pipe(gunzip) - .pipe(stream) + .pipe(destinationStream) .on('close', () => { resolve() }) + .on('error', error => { + core.error( + `An error has been encountered while decompressing and writing a downloaded file to ${destinationStream.path}` + ) + reject(error) + }) } else { - response.message.pipe(stream).on('close', () => { - resolve() - }) + response.message + .pipe(destinationStream) + .on('close', () => { + resolve() + }) + .on('error', error => { + core.error( + `An error has been encountered while writing a downloaded file to ${destinationStream.path}` + ) + reject(error) + }) } }) + return } } diff --git a/packages/artifact/src/internal/status-reporter.ts b/packages/artifact/src/internal/status-reporter.ts new file mode 100644 index 00000000..681dcfc9 --- /dev/null +++ b/packages/artifact/src/internal/status-reporter.ts @@ -0,0 +1,92 @@ +import {info} from '@actions/core' + +/** + * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded + * + * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable + * The total status of the upload/download gets displayed according to this value + * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + */ + +export class StatusReporter { + private totalNumberOfFilesToProcess = 0 + private processedCount = 0 + private displayFrequencyInMilliseconds: number + private largeFiles = new Map() + private totalFileStatus: NodeJS.Timeout | undefined + private largeFileStatus: NodeJS.Timeout | undefined + + constructor(displayFrequencyInMilliseconds: number) { + this.totalFileStatus = undefined + this.largeFileStatus = undefined + this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds + } + + setTotalNumberOfFilesToProcess(fileTotal: number): void { + this.totalNumberOfFilesToProcess = 
fileTotal + } + + start(): void { + // displays information about the total upload/download status + this.totalFileStatus = setInterval(() => { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage( + this.processedCount, + this.totalNumberOfFilesToProcess + ) + info( + `Total file count: ${ + this.totalNumberOfFilesToProcess + } ---- Processed file #${this.processedCount} (${percentage.slice( + 0, + percentage.indexOf('.') + 2 + )}%)` + ) + }, this.displayFrequencyInMilliseconds) + + // displays extra information about any large files that take a significant amount of time to upload or download every 1 second + this.largeFileStatus = setInterval(() => { + for (const value of Array.from(this.largeFiles.values())) { + info(value) + } + // delete all entires in the map after displaying the information so it will not be displayed again unless explicitly added + this.largeFiles.clear() + }, 1000) + } + + // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload + updateLargeFileStatus( + fileName: string, + numerator: number, + denominator: number + ): void { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(numerator, denominator) + const displayInformation = `Uploading ${fileName} (${percentage.slice( + 0, + percentage.indexOf('.') + 2 + )}%)` + + // any previously added display information should be overwritten for the specific large file because a map is being used + this.largeFiles.set(fileName, displayInformation) + } + + stop(): void { + if (this.totalFileStatus) { + clearInterval(this.totalFileStatus) + } + + if (this.largeFileStatus) { + clearInterval(this.largeFileStatus) + } + } + + incrementProcessedCount(): void { + this.processedCount++ + } + + private formatPercentage(numerator: number, denominator: number): string { + // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed + return ((numerator / denominator) * 100).toFixed(4).toString() + } +} diff --git a/packages/artifact/src/internal/upload-gzip.ts b/packages/artifact/src/internal/upload-gzip.ts index 58525765..6d4529be 100644 --- a/packages/artifact/src/internal/upload-gzip.ts +++ b/packages/artifact/src/internal/upload-gzip.ts @@ -43,7 +43,7 @@ export async function createGZipFileInBuffer( const inputStream = fs.createReadStream(originalFilePath) const gzip = zlib.createGzip() inputStream.pipe(gzip) - // read stream into buffer, using experimental async itterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 + // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 const chunks = [] for await (const chunk of gzip) { chunks.push(chunk) diff --git a/packages/artifact/src/internal/upload-http-client.ts b/packages/artifact/src/internal/upload-http-client.ts index e3432701..4f7676c5 100644 --- a/packages/artifact/src/internal/upload-http-client.ts +++ b/packages/artifact/src/internal/upload-http-client.ts @@ -1,4 +1,5 @@ import * as fs from 'fs' +import * as core from '@actions/core' import * as tmp from 'tmp-promise' import * as stream from 'stream' import { @@ -10,21 +11,23 @@ import { import { getArtifactUrl, getContentRange, - getRequestOptions, + getUploadRequestOptions, isRetryableStatusCode, - isSuccessStatusCode + isSuccessStatusCode, + isThrottledStatusCode, + 
displayHttpDiagnostics, + getExponentialRetryTimeInMilliseconds, + tryGetRetryAfterValueTimeInMilliseconds } from './utils' import { getUploadChunkSize, getUploadFileConcurrency, - getUploadRetryCount, - getRetryWaitTimeInMilliseconds + getRetryLimit } from './config-variables' import {promisify} from 'util' import {URL} from 'url' import {performance} from 'perf_hooks' -import {UploadStatusReporter} from './upload-status-reporter' -import {debug, warning, info} from '@actions/core' +import {StatusReporter} from './status-reporter' import {HttpClientResponse} from '@actions/http-client/index' import {IHttpClientResponse} from '@actions/http-client/interfaces' import {HttpManager} from './http-manager' @@ -35,11 +38,11 @@ const stat = promisify(fs.stat) export class UploadHttpClient { private uploadHttpManager: HttpManager - private statusReporter: UploadStatusReporter + private statusReporter: StatusReporter constructor() { this.uploadHttpManager = new HttpManager(getUploadFileConcurrency()) - this.statusReporter = new UploadStatusReporter() + this.statusReporter = new StatusReporter(10000) } /** @@ -57,19 +60,18 @@ export class UploadHttpClient { const data: string = JSON.stringify(parameters, null, 2) const artifactUrl = getArtifactUrl() - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediatly + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.uploadHttpManager.getClient(0) - const requestOptions = getRequestOptions('application/json', false, false) + const requestOptions = getUploadRequestOptions('application/json', false) const rawResponse = await client.post(artifactUrl, data, requestOptions) const body: string = await rawResponse.readBody() if (isSuccessStatusCode(rawResponse.message.statusCode) && body) { return JSON.parse(body) } else { - // eslint-disable-next-line no-console - console.log(rawResponse) + displayHttpDiagnostics(rawResponse) throw new Error( - `Unable to create a container for the artifact ${artifactName}` + `Unable to create a container for the artifact ${artifactName} at ${artifactUrl}` ) } } @@ -87,7 +89,7 @@ export class UploadHttpClient { ): Promise { const FILE_CONCURRENCY = getUploadFileConcurrency() const MAX_CHUNK_SIZE = getUploadChunkSize() - debug( + core.debug( `File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}` ) @@ -120,7 +122,7 @@ export class UploadHttpClient { let totalFileSize = 0 let abortPendingFileUploads = false - this.statusReporter.setTotalNumberOfFilesToUpload(filesToUpload.length) + this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length) this.statusReporter.start() // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors @@ -140,19 +142,23 @@ export class UploadHttpClient { currentFileParameters ) - debug( - `File: ${++completedFiles}/${filesToUpload.length}. ${ - currentFileParameters.file - } took ${(performance.now() - startTime).toFixed( - 3 - )} milliseconds to finish upload` - ) - uploadFileSize += uploadFileResult.successfullUploadSize + if (core.isDebug()) { + core.debug( + `File: ${++completedFiles}/${filesToUpload.length}. 
${ + currentFileParameters.file + } took ${(performance.now() - startTime).toFixed( + 3 + )} milliseconds to finish upload` + ) + } + + uploadFileSize += uploadFileResult.successfulUploadSize totalFileSize += uploadFileResult.totalSize if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file) if (!continueOnError) { - // existing uploads will be able to finish however all pending uploads will fail fast + // fail fast + core.error(`aborting artifact upload`) abortPendingFileUploads = true } } @@ -165,7 +171,7 @@ export class UploadHttpClient { // done uploading, safety dispose all connections this.uploadHttpManager.disposeAndReplaceAllClients() - info(`Total size of all the files uploaded is ${uploadFileSize} bytes`) + core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`) return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -191,7 +197,7 @@ export class UploadHttpClient { let uploadFileSize = 0 let isGzip = true - // the file that is being uploaded is less than 64k in size, to increase thoroughput and to minimize disk I/O + // the file that is being uploaded is less than 64k in size, to increase throughput and to minimize disk I/O // for creating a new GZip file, an in-memory buffer is used for compression if (totalFileSize < 65536) { const buffer = await createGZipFileInBuffer(parameters.file) @@ -225,16 +231,16 @@ export class UploadHttpClient { // chunk failed to upload isUploadSuccessful = false failedChunkSizes += uploadFileSize - warning(`Aborting upload for ${parameters.file} due to failure`) + core.warning(`Aborting upload for ${parameters.file} due to failure`) } return { isSuccess: isUploadSuccessful, - successfullUploadSize: uploadFileSize - failedChunkSizes, + successfulUploadSize: uploadFileSize - failedChunkSizes, totalSize: totalFileSize } } else { - // the file that is being uploaded is greater than 64k in size, a temprorary file gets created on disk using the + // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the // npm tmp-promise package and this file gets used during compression for the GZip file that gets created return tmp .file() @@ -261,11 +267,6 @@ export class UploadHttpClient { uploadFileSize - offset, parameters.maxChunkSize ) - if (abortFileUpload) { - // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed - failedChunkSizes += chunkSize - continue - } // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status if (uploadFileSize > 104857600) { @@ -280,6 +281,12 @@ export class UploadHttpClient { const end = offset + chunkSize - 1 offset += parameters.maxChunkSize + if (abortFileUpload) { + // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed + failedChunkSizes += chunkSize + continue + } + const result = await this.uploadChunk( httpClientIndex, parameters.resourceUrl, @@ -300,7 +307,9 @@ export class UploadHttpClient { // successfully uploaded so the server may report a different size for what was uploaded isUploadSuccessful = false failedChunkSizes += chunkSize - warning(`Aborting upload for ${parameters.file} due to failure`) + core.warning( + `Aborting upload for ${parameters.file} due to failure` + ) abortFileUpload = true } } @@ -311,7 +320,7 @@ export class UploadHttpClient { return new Promise(resolve => { resolve({ isSuccess: isUploadSuccessful, - 
successfullUploadSize: uploadFileSize - failedChunkSizes, + successfulUploadSize: uploadFileSize - failedChunkSizes, totalSize: totalFileSize }) }) @@ -344,7 +353,7 @@ export class UploadHttpClient { totalFileSize: number ): Promise { // prepare all the necessary headers before making any http call - const requestOptions = getRequestOptions( + const requestOptions = getUploadRequestOptions( 'application/octet-stream', true, isGzip, @@ -359,58 +368,91 @@ export class UploadHttpClient { } let retryCount = 0 - const retryLimit = getUploadRetryCount() + const retryLimit = getRetryLimit() + + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops + const incrementAndCheckRetryLimit = ( + response?: IHttpClientResponse + ): boolean => { + retryCount++ + if (retryCount > retryLimit) { + if (response) { + displayHttpDiagnostics(response) + } + core.info( + `Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}` + ) + return true + } + return false + } + + const backOff = async (retryAfterValue?: number): Promise => { + this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex) + if (retryAfterValue) { + core.info( + `Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload` + ) + await new Promise(resolve => setTimeout(resolve, retryAfterValue)) + } else { + const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount) + core.info( + `Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}` + ) + await new Promise(resolve => setTimeout(resolve, backoffTime)) + } + core.info( + `Finished backoff for retry #${retryCount}, continuing with upload` + ) + return + } // allow for failed chunks to be retried multiple times while (retryCount <= retryLimit) { + let response: IHttpClientResponse + try { - const response = await uploadChunkRequest() - - // Always read the body of the response. There is potential for a resource leak if the body is not read which will - // result in the connection remaining open along with unintended consequences when trying to dispose of the client - await response.readBody() - - if (isSuccessStatusCode(response.message.statusCode)) { - return true - } else if (isRetryableStatusCode(response.message.statusCode)) { - retryCount++ - if (retryCount > retryLimit) { - info( - `Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}` - ) - return false - } else { - info( - `HTTP ${response.message.statusCode} during chunk upload, will retry at offset ${start} after ${getRetryWaitTimeInMilliseconds} milliseconds. Retry count #${retryCount}. 
URL ${resourceUrl}` - ) - this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex) - await new Promise(resolve => - setTimeout(resolve, getRetryWaitTimeInMilliseconds()) - ) - } - } else { - info(`#ERROR# Unable to upload chunk to ${resourceUrl}`) - // eslint-disable-next-line no-console - console.log(response) - return false - } + response = await uploadChunkRequest() } catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the upload + core.info( + `An error has been caught http-client index ${httpClientIndex}, retrying the upload` + ) // eslint-disable-next-line no-console console.log(error) - retryCount++ - if (retryCount > retryLimit) { - info( - `Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}` - ) + if (incrementAndCheckRetryLimit()) { return false - } else { - info(`Retrying chunk upload after encountering an error`) - this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex) - await new Promise(resolve => - setTimeout(resolve, getRetryWaitTimeInMilliseconds()) - ) } + await backOff() + continue + } + + // Always read the body of the response. There is potential for a resource leak if the body is not read which will + // result in the connection remaining open along with unintended consequences when trying to dispose of the client + await response.readBody() + + if (isSuccessStatusCode(response.message.statusCode)) { + return true + } else if (isRetryableStatusCode(response.message.statusCode)) { + core.info( + `A ${response.message.statusCode} status code has been received, will attempt to retry the upload` + ) + if (incrementAndCheckRetryLimit(response)) { + return false + } + isThrottledStatusCode(response.message.statusCode) + ? await backOff( + tryGetRetryAfterValueTimeInMilliseconds(response.message.headers) + ) + : await backOff() + } else { + core.error( + `Unexpected response. 
Unable to upload chunk to ${resourceUrl}` + ) + displayHttpDiagnostics(response) + return false } } return false @@ -421,32 +463,34 @@ export class UploadHttpClient { * Updating the size indicates that we are done uploading all the contents of the artifact */ async patchArtifactSize(size: number, artifactName: string): Promise { - const requestOptions = getRequestOptions('application/json', false, false) + const requestOptions = getUploadRequestOptions('application/json', false) const resourceUrl = new URL(getArtifactUrl()) resourceUrl.searchParams.append('artifactName', artifactName) const parameters: PatchArtifactSize = {Size: size} const data: string = JSON.stringify(parameters, null, 2) - debug(`URL is ${resourceUrl.toString()}`) + core.debug(`URL is ${resourceUrl.toString()}`) - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediatly + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.uploadHttpManager.getClient(0) - const rawResponse: HttpClientResponse = await client.patch( + const response: HttpClientResponse = await client.patch( resourceUrl.toString(), data, requestOptions ) - const body: string = await rawResponse.readBody() - if (isSuccessStatusCode(rawResponse.message.statusCode)) { - debug( - `Artifact ${artifactName} has been successfully uploaded, total size ${size}` + const body: string = await response.readBody() + if (isSuccessStatusCode(response.message.statusCode)) { + core.debug( + `Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}` ) - } else if (rawResponse.message.statusCode === 404) { + } else if (response.message.statusCode === 404) { throw new Error(`An Artifact with the name ${artifactName} was not found`) } else { - // eslint-disable-next-line no-console - console.log(body) - throw new Error(`Unable to finish uploading artifact ${artifactName}`) + displayHttpDiagnostics(response) + core.info(body) + throw new Error( + `Unable to finish uploading artifact ${artifactName} to ${resourceUrl}` + ) } } } @@ -460,6 +504,6 @@ interface UploadFileParameters { interface UploadFileResult { isSuccess: boolean - successfullUploadSize: number + successfulUploadSize: number totalSize: number } diff --git a/packages/artifact/src/internal/upload-status-reporter.ts b/packages/artifact/src/internal/upload-status-reporter.ts deleted file mode 100644 index 8f4242c1..00000000 --- a/packages/artifact/src/internal/upload-status-reporter.ts +++ /dev/null @@ -1,90 +0,0 @@ -import {info} from '@actions/core' - -/** - * Upload Status Reporter that displays information about the progress/status of an artifact that is being uploaded - * - * Every 10 seconds, the total status of the upload gets displayed. 
If there is a large file that is being uploaded, - * extra information about the individual status of an upload can also be displayed - */ - -export class UploadStatusReporter { - private totalNumberOfFilesToUpload = 0 - private processedCount = 0 - private largeUploads = new Map() - private totalUploadStatus: NodeJS.Timeout | undefined - private largeFileUploadStatus: NodeJS.Timeout | undefined - - constructor() { - this.totalUploadStatus = undefined - this.largeFileUploadStatus = undefined - } - - setTotalNumberOfFilesToUpload(fileTotal: number): void { - this.totalNumberOfFilesToUpload = fileTotal - } - - start(): void { - const _this = this - - // displays information about the total upload status every 10 seconds - this.totalUploadStatus = setInterval(function() { - // display 1 decimal place without any rounding - const percentage = _this.formatPercentage( - _this.processedCount, - _this.totalNumberOfFilesToUpload - ) - info( - `Total file(s): ${ - _this.totalNumberOfFilesToUpload - } ---- Processed file #${_this.processedCount} (${percentage.slice( - 0, - percentage.indexOf('.') + 2 - )}%)` - ) - }, 10000) - - // displays extra information about any large files that take a significant amount of time to upload every 1 second - this.largeFileUploadStatus = setInterval(function() { - for (const value of Array.from(_this.largeUploads.values())) { - info(value) - } - // delete all entires in the map after displaying the information so it will not be displayed again unless explicitly added - _this.largeUploads = new Map() - }, 1000) - } - - updateLargeFileStatus( - fileName: string, - numerator: number, - denomiator: number - ): void { - // display 1 decimal place without any rounding - const percentage = this.formatPercentage(numerator, denomiator) - const displayInformation = `Uploading ${fileName} (${percentage.slice( - 0, - percentage.indexOf('.') + 2 - )}%)` - - // any previously added display information should be overwritten for the specific large file because a map is being used - this.largeUploads.set(fileName, displayInformation) - } - - stop(): void { - if (this.totalUploadStatus) { - clearInterval(this.totalUploadStatus) - } - - if (this.largeFileUploadStatus) { - clearInterval(this.largeFileUploadStatus) - } - } - - incrementProcessedCount(): void { - this.processedCount++ - } - - private formatPercentage(numerator: number, denominator: number): string { - // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed - return ((numerator / denominator) * 100).toFixed(4).toString() - } -} diff --git a/packages/artifact/src/internal/utils.ts b/packages/artifact/src/internal/utils.ts index 9919dd10..95e14c00 100644 --- a/packages/artifact/src/internal/utils.ts +++ b/packages/artifact/src/internal/utils.ts @@ -1,14 +1,38 @@ -import {debug} from '@actions/core' +import {debug, info} from '@actions/core' import {promises as fs} from 'fs' import {HttpCodes, HttpClient} from '@actions/http-client' import {BearerCredentialHandler} from '@actions/http-client/auth' -import {IHeaders} from '@actions/http-client/interfaces' +import {IHeaders, IHttpClientResponse} from '@actions/http-client/interfaces' +import {IncomingHttpHeaders} from 'http' import { getRuntimeToken, getRuntimeUrl, - getWorkFlowRunId + getWorkFlowRunId, + getRetryMultiplier, + getInitialRetryIntervalInMilliseconds } from './config-variables' +/** + * Returns a retry time in milliseconds that exponentially gets larger + * depending on the amount of retries that 
have been attempted + */ +export function getExponentialRetryTimeInMilliseconds( + retryCount: number +): number { + if (retryCount < 0) { + throw new Error('RetryCount should not be negative') + } else if (retryCount === 0) { + return getInitialRetryIntervalInMilliseconds() + } + + const minTime = + getInitialRetryIntervalInMilliseconds() * getRetryMultiplier() * retryCount + const maxTime = minTime * getRetryMultiplier() + + // returns a random number between the minTime (inclusive) and the maxTime (exclusive) + return Math.random() * (maxTime - minTime) + minTime +} + /** * Parses a env variable that is a number */ @@ -42,11 +66,47 @@ export function isRetryableStatusCode(statusCode?: number): boolean { const retryableStatusCodes = [ HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout + HttpCodes.GatewayTimeout, + HttpCodes.TooManyRequests ] return retryableStatusCodes.includes(statusCode) } +export function isThrottledStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return false + } + + return statusCode === HttpCodes.TooManyRequests +} + +/** + * Attempts to get the retry-after value from a set of http headers. The retry time + * is originally denoted in seconds, so if present, it is converted to milliseconds + * @param headers all the headers received when making an http call + */ +export function tryGetRetryAfterValueTimeInMilliseconds( + headers: IncomingHttpHeaders +): number | undefined { + if (headers['retry-after']) { + const retryTime = Number(headers['retry-after']) + if (!isNaN(retryTime)) { + info(`Retry-After header is present with a value of ${retryTime}`) + return retryTime * 1000 + } + info( + `Returned retry-after header value: ${retryTime} is non-numeric and cannot be used` + ) + return undefined + } + info( + `No retry-after header was found. 
Dumping all headers for diagnostic purposes` + ) + // eslint-disable-next-line no-console + console.log(headers) + return undefined +} + export function getContentRange( start: number, end: number, @@ -60,27 +120,62 @@ export function getContentRange( } /** - * Sets all the necessary headers when making HTTP calls + * Sets all the necessary headers when downloading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} acceptGzip can we accept a gzip encoded response + * @param {string} acceptType the type of content that we can accept + * @returns appropriate request options to make a specific http call during artifact download + */ +export function getDownloadRequestOptions( + contentType: string, + isKeepAlive?: boolean, + acceptGzip?: boolean +): IHeaders { + const requestOptions: IHeaders = {} + + if (contentType) { + requestOptions['Content-Type'] = contentType + } + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive' + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10' + } + if (acceptGzip) { + // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header + requestOptions['Accept-Encoding'] = 'gzip' + requestOptions[ + 'Accept' + ] = `application/octet-stream;api-version=${getApiVersion()}` + } else { + // default to application/json if we are not working with gzip content + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}` + } + + return requestOptions +} + +/** + * Sets all the necessary headers when uploading an artifact * @param {string} contentType the type of content being uploaded * @param {boolean} isKeepAlive is the same connection being used to make multiple calls * @param {boolean} isGzip is the connection being used to upload GZip compressed content * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed * @param {number} contentLength the length of the content that is being uploaded * @param {string} contentRange the range of the content that is being uploaded - * @returns appropriate request options to make a specific http call + * @returns appropriate request options to make a specific http call during artifact upload */ -export function getRequestOptions( - contentType?: string, +export function getUploadRequestOptions( + contentType: string, isKeepAlive?: boolean, isGzip?: boolean, uncompressedLength?: number, contentLength?: number, contentRange?: string ): IHeaders { - const requestOptions: IHeaders = { - // same Accept type for each http call that gets made - Accept: `application/json;api-version=${getApiVersion()}` - } + const requestOptions: IHeaders = {} + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}` if (contentType) { requestOptions['Content-Type'] = contentType } @@ -99,6 +194,7 @@ export function getRequestOptions( if (contentRange) { requestOptions['Content-Range'] = contentRange } + return requestOptions } @@ -114,6 +210,25 @@ export function getArtifactUrl(): string { return artifactUrl } +/** + * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information + * about the http call that was made by the actions http client. 
This information might be useful to display for diagnostic purposes, but + * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only + * the information that we want. + * + * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. + * Other information such as the headers, the response code and message might be useful, so this is displayed. + */ +export function displayHttpDiagnostics(response: IHttpClientResponse): void { + info( + `##### Begin Diagnostic HTTP information ##### +Status Code: ${response.message.statusCode} +Status Message: ${response.message.statusMessage} +Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} +###### End Diagnostic HTTP information ######` + ) +} + /** * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain