diff --git a/packages/artifact/__tests__/upload-artifact.test.ts b/packages/artifact/__tests__/upload-artifact.test.ts
index 1761fa01..cd383db9 100644
--- a/packages/artifact/__tests__/upload-artifact.test.ts
+++ b/packages/artifact/__tests__/upload-artifact.test.ts
@@ -1,260 +1,137 @@
 import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
 import * as zip from '../src/internal/upload/zip'
 import * as util from '../src/internal/shared/util'
-import * as retention from '../src/internal/upload/retention'
 import * as config from '../src/internal/shared/config'
-import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
+import {ArtifactServiceClientJSON} from '../src/generated'
 import * as blobUpload from '../src/internal/upload/blob-upload'
 import {uploadArtifact} from '../src/internal/upload/upload-artifact'
 import {noopLogs} from './common'
 import {FilesNotFoundError} from '../src/internal/shared/errors'
-import {BlockBlobClient} from '@azure/storage-blob'
+import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import * as fs from 'fs'
 import * as path from 'path'
 
+const uploadStreamMock = jest.fn()
+const blockBlobClientMock = jest.fn().mockImplementation(() => ({
+  uploadStream: uploadStreamMock
+}))
+
+jest.mock('@azure/storage-blob', () => ({
+  BlobClient: jest.fn().mockImplementation(() => {
+    return {
+      getBlockBlobClient: blockBlobClientMock
+    }
+  })
+}))
+
+const fixtures = {
+  uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
+  files: [
+    ['file1.txt', 'test 1 file content'],
+    ['file2.txt', 'test 2 file content'],
+    ['file3.txt', 'test 3 file content']
+  ],
+  backendIDs: {
+    workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
+    workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
+  },
+  runtimeToken: 'test-token',
+  resultsServiceURL: 'http://results.local',
+  inputs: {
+    artifactName: 'test-artifact',
+    files: [
+      '/home/user/files/plz-upload/file1.txt',
+      '/home/user/files/plz-upload/file2.txt',
+      '/home/user/files/plz-upload/dir/file3.txt'
+    ],
+    rootDirectory: '/home/user/files/plz-upload'
+  }
+}
+
 describe('upload-artifact', () => {
+  beforeAll(() => {
+    if (!fs.existsSync(fixtures.uploadDirectory)) {
+      fs.mkdirSync(fixtures.uploadDirectory, {recursive: true})
+    }
+
+    for (const [file, content] of fixtures.files) {
+      fs.writeFileSync(path.join(fixtures.uploadDirectory, file), content)
+    }
+  })
+
   beforeEach(() => {
     noopLogs()
+    jest
+      .spyOn(uploadZipSpecification, 'validateRootDirectory')
+      .mockReturnValue()
+    jest
+      .spyOn(util, 'getBackendIdsFromToken')
+      .mockReturnValue(fixtures.backendIDs)
+    jest
+      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
+      .mockReturnValue(
+        fixtures.files.map(file => ({
+          sourcePath: path.join(fixtures.uploadDirectory, file[0]),
+          destinationPath: file[0]
+        }))
+      )
+    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
+    jest
+      .spyOn(config, 'getResultsServiceUrl')
+      .mockReturnValue(fixtures.resultsServiceURL)
   })
 
   afterEach(() => {
     jest.restoreAllMocks()
   })
 
-  it('should successfully upload an artifact', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
-    jest
-      .spyOn(zip, 'createZipUploadStream')
-      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
-    jest
-      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
-      .mockReturnValue(
-        Promise.resolve({
-          ok: true,
-          signedUploadUrl: 'https://signed-upload-url.com'
-        })
-      )
-    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
-      Promise.resolve({
-        uploadSize: 1234,
-        sha256Hash: 'test-sha256-hash'
-      })
-    )
-    jest
-      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
-      .mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))
-
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
-    )
-
-    expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
-  })
-
-  it('should throw an error if the root directory is invalid', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockImplementation(() => {
-        throw new Error('Invalid root directory')
-      })
-
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
-    )
-
-    expect(uploadResp).rejects.toThrow('Invalid root directory')
-  })
-
-  it('should reject if there are no files to upload', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
+  it('should reject if there are no files to upload', async () => {
     jest
       .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
+      .mockClear()
       .mockReturnValue([])
 
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
    )
-    expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
+    await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
  })
 
-  it('should reject if no backend IDs are found', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
-    jest
-      .spyOn(zip, 'createZipUploadStream')
-      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
+  it('should reject if no backend IDs are found', async () => {
+    jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()
 
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should return false if the creation request fails', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
+  it('should return false if the creation request fails', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
 
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should return false if blob storage upload is unsuccessful', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
+  it('should return false if blob storage upload is unsuccessful', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
@@ -267,57 +144,19 @@ describe('upload-artifact', () => {
       .spyOn(blobUpload, 'uploadZipToBlobStorage')
       .mockReturnValue(Promise.reject(new Error('boom')))
 
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should reject if finalize artifact fails', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
+  it('should reject if finalize artifact fails', async () => {
    jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
@@ -336,112 +175,113 @@ describe('upload-artifact', () => {
       .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
       .mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))
 
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should throw an error uploading blob chunks get delayed', async () => {
-    const mockDate = new Date('2020-01-01')
-    const dirPath = path.join(__dirname, `plz-upload`)
-    if (!fs.existsSync(dirPath)) {
-      fs.mkdirSync(dirPath, {recursive: true})
-    }
-
-    fs.writeFileSync(path.join(dirPath, 'file1.txt'), 'test file content')
-    fs.writeFileSync(path.join(dirPath, 'file2.txt'), 'test file content')
-
-    fs.writeFileSync(path.join(dirPath, 'file3.txt'), 'test file content')
-
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: path.join(dirPath, 'file1.txt'),
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: path.join(dirPath, 'file2.txt'),
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: path.join(dirPath, 'file3.txt'),
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
+  it('should successfully upload an artifact', async () => {
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
         Promise.resolve({
           ok: true,
-          signedUploadUrl: 'https://signed-upload-url.com'
+          signedUploadUrl: 'https://signed-upload-url.local'
         })
       )
     jest
-      .spyOn(blobUpload, 'uploadZipToBlobStorage')
-      .mockReturnValue(Promise.reject(new Error('Upload progress stalled.')))
-
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
-    BlockBlobClient.prototype.uploadStream = jest
-      .fn()
-      .mockImplementation(
-        async (stream, bufferSize, maxConcurrency, options) => {
-          return new Promise(resolve => {
-            // Call the onProgress callback with a progress event
-            options.onProgress({loadedBytes: 0})
-
-            // Wait for 31 seconds before resolving the promise
-            setTimeout(() => {
-              // Call the onProgress callback again to simulate progress
-              options.onProgress({loadedBytes: 100})
-
-              resolve()
-            }, 31000) // Delay longer than your timeout
-          })
-        }
+      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          artifactId: '1'
+        })
       )
-    jest.mock('fs')
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+    uploadStreamMock.mockImplementation(
+      async (
+        stream: NodeJS.ReadableStream,
+        bufferSize?: number,
+        maxConcurrency?: number,
+        options?: BlockBlobUploadStreamOptions
+      ) => {
+        const {onProgress, abortSignal} = options || {}
+
+        onProgress?.({loadedBytes: 0})
+
+        return new Promise(resolve => {
+          const timerId = setTimeout(() => {
+            onProgress?.({loadedBytes: 256})
+            resolve({})
+          }, 1_000)
+          abortSignal?.addEventListener('abort', () => {
+            clearTimeout(timerId)
+            resolve({})
+          })
+        })
+      }
     )
 
-    expect(uploadResp).rejects.toThrow('Upload progress stalled.')
+    const {id, size} = await uploadArtifact(
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
+    )
+
+    expect(id).toBe(1)
+    expect(size).toBe(256)
+  })
+
+  it('should throw an error uploading blob chunks get delayed', async () => {
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          signedUploadUrl: 'https://signed-upload-url.local'
+        })
+      )
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          artifactId: '1'
+        })
+      )
+    jest
+      .spyOn(config, 'getResultsServiceUrl')
+      .mockReturnValue('https://results.local')
+
+    jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)
+
+    uploadStreamMock.mockImplementation(
+      async (
+        stream: NodeJS.ReadableStream,
+        bufferSize?: number,
+        maxConcurrency?: number,
+        options?: BlockBlobUploadStreamOptions
+      ) => {
+        const {onProgress, abortSignal} = options || {}
+        onProgress?.({loadedBytes: 0})
+        return new Promise(resolve => {
+          abortSignal?.addEventListener('abort', () => {
+            resolve({})
+          })
+        })
+      }
+    )
+
+    const uploadResp = uploadArtifact(
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
+
+    await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
   })
 })
diff --git a/packages/artifact/src/internal/shared/config.ts b/packages/artifact/src/internal/shared/config.ts
index 437a3624..1b20c7b9 100644
--- a/packages/artifact/src/internal/shared/config.ts
+++ b/packages/artifact/src/internal/shared/config.ts
@@ -57,3 +57,7 @@ export function getConcurrency(): number {
   const concurrency = 16 * numCPUs
   return concurrency > 300 ? 300 : concurrency
 }
+
+export function getUploadChunkTimeout(): number {
+  return 30_000
+}
diff --git a/packages/artifact/src/internal/upload/blob-upload.ts b/packages/artifact/src/internal/upload/blob-upload.ts
index 6c62fd49..331ee878 100644
--- a/packages/artifact/src/internal/upload/blob-upload.ts
+++ b/packages/artifact/src/internal/upload/blob-upload.ts
@@ -1,7 +1,11 @@
 import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import {TransferProgressEvent} from '@azure/core-http'
 import {ZipUploadStream} from './zip'
-import {getUploadChunkSize, getConcurrency} from '../shared/config'
+import {
+  getUploadChunkSize,
+  getConcurrency,
+  getUploadChunkTimeout
+} from '../shared/config'
 import * as core from '@actions/core'
 import * as crypto from 'crypto'
 import * as stream from 'stream'
@@ -25,29 +29,26 @@ export async function uploadZipToBlobStorage(
 ): Promise<BlobUploadResponse> {
   let uploadByteCount = 0
   let lastProgressTime = Date.now()
-  let timeoutId: NodeJS.Timeout | undefined
+  const abortController = new AbortController()
 
-  const chunkTimer = (timeout: number): NodeJS.Timeout => {
-    // clear the previous timeout
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
+  const chunkTimer = async (interval: number): Promise<void> =>
+    new Promise((resolve, reject) => {
+      const timer = setInterval(() => {
+        if (Date.now() - lastProgressTime > interval) {
+          reject(new Error('Upload progress stalled.'))
+        }
+      }, interval)
+
+      abortController.signal.addEventListener('abort', () => {
+        clearInterval(timer)
+        resolve()
+      })
+    })
 
-    timeoutId = setTimeout(() => {
-      const now = Date.now()
-      // if there's been more than 30 seconds since the
-      // last progress event, then we'll consider the upload stalled
-      if (now - lastProgressTime > timeout) {
-        throw new Error('Upload progress stalled.')
-      }
-    }, timeout)
-    return timeoutId
-  }
   const maxConcurrency = getConcurrency()
   const bufferSize = getUploadChunkSize()
   const blobClient = new BlobClient(authenticatedUploadURL)
   const blockBlobClient = blobClient.getBlockBlobClient()
-  const timeoutDuration = 300000 // 30 seconds
 
   core.debug(
     `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
   )
@@ -56,13 +57,13 @@ export async function uploadZipToBlobStorage(
   const uploadCallback = (progress: TransferProgressEvent): void => {
     core.info(`Uploaded bytes ${progress.loadedBytes}`)
     uploadByteCount = progress.loadedBytes
-    chunkTimer(timeoutDuration)
     lastProgressTime = Date.now()
   }
 
   const options: BlockBlobUploadStreamOptions = {
     blobHTTPHeaders: {blobContentType: 'zip'},
-    onProgress: uploadCallback
+    onProgress: uploadCallback,
+    abortSignal: abortController.signal
   }
 
   let sha256Hash: string | undefined = undefined
@@ -75,24 +76,22 @@ export async function uploadZipToBlobStorage(
   core.info('Beginning upload of artifact content to blob storage')
 
   try {
-    // Start the chunk timer
-    timeoutId = chunkTimer(timeoutDuration)
-    await blockBlobClient.uploadStream(
-      uploadStream,
-      bufferSize,
-      maxConcurrency,
-      options
-    )
+    await Promise.race([
+      blockBlobClient.uploadStream(
+        uploadStream,
+        bufferSize,
+        maxConcurrency,
+        options
+      ),
+      chunkTimer(getUploadChunkTimeout())
+    ])
   } catch (error) {
     if (NetworkError.isNetworkErrorCode(error?.code)) {
       throw new NetworkError(error?.code)
     }
     throw error
   } finally {
-    // clear the timeout whether or not the upload completes
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
+    abortController.abort()
   }
 
   core.info('Finished uploading artifact content to blob storage!')