From e5fc7ccac72118be02cbb6f33030bde55413a46b Mon Sep 17 00:00:00 2001
From: Konrad Pabjan
Date: Wed, 2 Aug 2023 15:05:21 -0400
Subject: [PATCH] Improvements to upload POC

---
 .../src/internal/upload/tmp/testing.ts | 119 +++++++++++++-----
 1 file changed, 89 insertions(+), 30 deletions(-)

diff --git a/packages/artifact/src/internal/upload/tmp/testing.ts b/packages/artifact/src/internal/upload/tmp/testing.ts
index 0f0895af..0b2001fa 100644
--- a/packages/artifact/src/internal/upload/tmp/testing.ts
+++ b/packages/artifact/src/internal/upload/tmp/testing.ts
@@ -1,15 +1,38 @@
-import {AppendBlobAppendBlockOptions, BlobClient} from '@azure/storage-blob'
+import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
+import {TransferProgressEvent} from '@azure/core-http'
 import * as a from 'archiver'
 import * as fs from 'fs'
 import * as stream from 'stream'
 
+const bufferSize = 1024 * 1024 * 8 // 8 MB
+
+// Custom stream transformer so we can set the highWaterMark property
+// See https://github.com/nodejs/node/issues/8855
+export class ZipUploadStream extends stream.Transform {
+  constructor(bufferSize: number) {
+    super({
+      highWaterMark: bufferSize
+    })
+  }
+
+  // Pass each chunk through unchanged; only the buffering size differs
+  _transform(chunk: any, enc: any, cb: any) {
+    cb(null, chunk)
+  }
+}
+
 // for local testing, run this using ts-node testing.ts
 export async function test(){
+  // Paste a full SAS URL here, then rewrite the hostname so the request
+  // resolves to the local storage emulator endpoint
+  let sasURL = "paste here"
+  sasURL = sasURL.replace("http://devstoreaccount1.blob.codedev.localhost", "http://127.0.0.1:11000/devstoreaccount1")
 
-  const sasURL = "http://127.0.0.1:11000/devstoreaccount1/actions-results/workflow-run-d...{add full SAS URL for testing here}"
   const blobClient = new BlobClient(sasURL);
   const zip = a.create('zip', {
     zlib: { level: 9 } // Sets the compression level.
+    // Available options are 0-9:
+    // 0 => no compression
+    // 1 => fastest, with the lowest compression
+    // 9 => highest compression ratio, but the slowest
   });
 
   // append files that are going to be part of the final zip
@@ -17,48 +40,84 @@ export async function test(){
   zip.append('this is file 2', { name: 'file2.txt' });
   zip.append('this is file 1 in a directory', { name: 'dir/file1.txt' });
   zip.append('this is file 2 in a directory', { name: 'dir/file2.txt' });
+  zip.append('this is a live demo!!!', { name: 'dir/alive.txt' });
   zip.append(fs.createReadStream('a.txt'), { name: 'dir2/a.txt' })
   zip.append(fs.createReadStream('b.txt'), { name: 'dir2/b.txt' })
 
-  // Create in-memory duplex stream to pipe zip straight to the upload
-  const passThroughStream = new stream.PassThrough()
-  zip.pipe(passThroughStream)
+  const zipUploadStream = new ZipUploadStream(bufferSize)
+  zip.pipe(zipUploadStream)
   zip.finalize();
 
+  console.log("Write high watermark value " + zipUploadStream.writableHighWaterMark)
+  console.log("Read high watermark value " + zipUploadStream.readableHighWaterMark)
+
+  // good practice to catch warnings (i.e. stat failures and other non-blocking errors)
+  zip.on('warning', function(err) {
+    if (err.code === 'ENOENT') {
+      console.log("zip error ENOENT")
+    } else {
+      console.log("some other warning")
+      console.log(err)
+    }
+  });
+
+  // good practice to catch this error explicitly
+  zip.on('error', function(err) {
+    console.log("some error with zip")
+    console.log(err)
+  });
+
+  zip.on('progress', function(progress: a.ProgressData) {
+    console.log(progress)
+
+    /* This outputs data like the following; we could potentially use it
+    for additional logging to show the status of the zip creation:
+    {
+      entries: { total: 7, processed: 1 },
+      fs: { totalBytes: 0, processedBytes: 0 }
+    }
+    {
+      entries: { total: 7, processed: 2 },
+      fs: { totalBytes: 0, processedBytes: 0 }
+    }
+    */
+  })
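+
+  // An illustrative sketch of what that extra logging could look like:
+  // archiver supports multiple 'progress' listeners, so the same event can
+  // also report zip creation as a percentage of processed entries
+  zip.on('progress', function(progress: a.ProgressData) {
+    const percent = Math.round((progress.entries.processed / progress.entries.total) * 100)
+    console.log("zip creation is " + percent + "% complete")
+  })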
+
+  // We can add these to debug logging
+  zip.on('end', function() {
+    console.log("zip ending")
+  });
+  zip.on('finish', function() {
+    console.log("zip finished")
+  });
+
   // Upload options
-  const ONE_MEGABYTE = 1024 * 1024;
-  const uploadOptions = { bufferSize: 4 * ONE_MEGABYTE, maxBuffers: 5 };
+  const maxBuffers = 5
   const blockBlobClient = blobClient.getBlockBlobClient()
+
+  // Track upload progress so the final uploaded byte count can be logged
+  let uploadByteCount = 0
+  const myCallback = function(progress: TransferProgressEvent) {
+    console.log("Byte upload count " + progress.loadedBytes)
+    uploadByteCount = progress.loadedBytes
+  }
+
+  const options: BlockBlobUploadStreamOptions = {
+    blobHTTPHeaders: { blobContentType: "zip" },
+    onProgress: myCallback
+  }
 
   // Upload!
   try {
     await blockBlobClient.uploadStream(
-      passThroughStream,
-      uploadOptions.bufferSize,
-      uploadOptions.maxBuffers
+      zipUploadStream,
+      bufferSize,
+      maxBuffers,
+      options
     );
   } catch (error){
     console.log(error)
   }
-
-  // That was easy
-  console.log("this worked!")
+  console.log("final upload size in bytes is " + uploadByteCount)
 }
 
 test()
-
-
-// Another simple way of doing this
-//const appendBlobClient = blobClient.getAppendBlobClient()
-
-//const response = await appendBlobClient.createIfNotExists()
-//console.log(response)
-
-
-//const content = "hello there! This is uploading from a SAS"
-
-//const options : AppendBlobAppendBlockOptions = {
-  // TODO, we could add MD5 or CRC64 hash info to protect the integrity
-//}
-
-//const response2 = await appendBlobClient.appendBlock(content, content.length, options);
\ No newline at end of file