
POC upload using SAS URL

artifact-next
Konrad Pabjan 2023-08-01 17:33:32 -04:00
parent a3d4efa112
commit 39a7ba7bbd
6 changed files with 1352 additions and 39 deletions

File diff suppressed because it is too large

package.json View File

@@ -37,10 +37,14 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "dependencies": {
-    "@actions/core": "^1.10.0"
+    "@actions/core": "^1.10.0",
+    "@azure/storage-blob": "^12.15.0",
+    "@types/node": "^20.4.5",
+    "archiver": "^5.3.1"
   },
   "devDependencies": {
     "@protobuf-ts/plugin": "^2.2.3-alpha.1",
+    "@types/archiver": "^5.3.2",
     "twirp-ts": "^2.5.0",
     "typescript": "^3.9.10"
   }

a.txt View File

@@ -0,0 +1 @@
hello there! This is from a.txt

b.txt View File

@@ -0,0 +1 @@
This is from b.txt

testing.ts View File

@@ -0,0 +1,64 @@
import {AppendBlobAppendBlockOptions, BlobClient} from '@azure/storage-blob'
import * as archiver from 'archiver'
import * as fs from 'fs'
import * as stream from 'stream'

// For local testing, run this using: ts-node testing.ts
export async function test(): Promise<void> {
  const sasURL =
    'http://127.0.0.1:11000/devstoreaccount1/actions-results/workflow-run-d...{add full SAS URL for testing here}'
  const blobClient = new BlobClient(sasURL)

  const zip = archiver.create('zip', {
    zlib: {level: 9} // Sets the compression level
  })

  // Append the files that are going to be part of the final zip
  zip.append('this is file 1', {name: 'file1.txt'})
  zip.append('this is file 2', {name: 'file2.txt'})
  zip.append('this is file 1 in a directory', {name: 'dir/file1.txt'})
  zip.append('this is file 2 in a directory', {name: 'dir/file2.txt'})
  zip.append(fs.createReadStream('a.txt'), {name: 'dir2/a.txt'})
  zip.append(fs.createReadStream('b.txt'), {name: 'dir2/b.txt'})

  // Create an in-memory duplex stream to pipe the zip straight into the upload
  const passThroughStream = new stream.PassThrough()
  zip.pipe(passThroughStream)
  zip.finalize()

  // Upload options
  const ONE_MEGABYTE = 1024 * 1024
  const uploadOptions = {bufferSize: 4 * ONE_MEGABYTE, maxBuffers: 5}
  const blockBlobClient = blobClient.getBlockBlobClient()

  // Upload!
  try {
    await blockBlobClient.uploadStream(
      passThroughStream,
      uploadOptions.bufferSize,
      uploadOptions.maxBuffers
    )
    // That was easy
    console.log('this worked!')
  } catch (error) {
    console.log(error)
  }
}

test()

// Another simple way of doing this, using an append blob instead of a block blob:
// const appendBlobClient = blobClient.getAppendBlobClient()
// const response = await appendBlobClient.createIfNotExists()
// console.log(response)
// const content = 'hello there! This is uploading from a SAS'
// const options: AppendBlobAppendBlockOptions = {
//   // TODO: we could add MD5 or CRC64 hash info to protect the integrity of the upload
// }
// const response2 = await appendBlobClient.appendBlock(content, content.length, options)
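The SAS URL above is a hand-filled placeholder, and the 127.0.0.1/devstoreaccount1 host suggests a local Azurite emulator. For local testing, a SAS token can be minted with the same @azure/storage-blob package. A minimal sketch, assuming Azurite is running with its published development-storage credentials and listening on the blob port used above; makeLocalSasUrl is an illustrative helper, not part of this change:

import {
  BlobSASPermissions,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters
} from '@azure/storage-blob'

// Azurite's well-known development-storage credentials (publicly documented defaults)
const accountName = 'devstoreaccount1'
const accountKey =
  'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='

// Illustrative helper: builds a one-hour read/add/create/write SAS URL for a local blob
export function makeLocalSasUrl(containerName: string, blobName: string): string {
  const credential = new StorageSharedKeyCredential(accountName, accountKey)
  const sas = generateBlobSASQueryParameters(
    {
      containerName,
      blobName,
      permissions: BlobSASPermissions.parse('racw'), // read, add, create, write
      expiresOn: new Date(Date.now() + 60 * 60 * 1000) // valid for one hour
    },
    credential
  ).toString()
  // Port 11000 matches the URL in testing.ts; Azurite's default blob port is 10000
  return `http://127.0.0.1:11000/${accountName}/${containerName}/${blobName}?${sas}`
}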

View File

@@ -16,6 +16,7 @@ export async function uploadArtifact(
   // TODO Twirp call to create new artifact
   // TODO Upload to blob storage using SAS URL
+  // testing.ts is being used to prototype this functionality
   // TODO Twirp call to finalize the new artifact upload
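Taken together, those TODOs outline the eventual upload flow: a Twirp call to create the artifact and receive a signed upload URL, a streamed upload to blob storage as prototyped in testing.ts, and a Twirp call to finalize. A rough sketch of how the pieces could compose; ArtifactServiceClient, CreateArtifact, FinalizeArtifact, and signedUploadUrl are hypothetical names for illustration, not the actual generated Twirp API:

import {BlobClient} from '@azure/storage-blob'
import * as stream from 'stream'

// Hypothetical shape of the generated Twirp client; names are illustrative only
interface ArtifactServiceClient {
  CreateArtifact(req: {name: string}): Promise<{signedUploadUrl: string}>
  FinalizeArtifact(req: {name: string; size: number}): Promise<{ok: boolean}>
}

export async function uploadArtifactSketch(
  client: ArtifactServiceClient,
  name: string,
  zipStream: stream.Readable,
  size: number
): Promise<void> {
  // 1. Twirp call to create the new artifact and get a SAS URL back
  const {signedUploadUrl} = await client.CreateArtifact({name})

  // 2. Upload the zip to blob storage using the SAS URL (as prototyped in testing.ts)
  const blockBlobClient = new BlobClient(signedUploadUrl).getBlockBlobClient()
  await blockBlobClient.uploadStream(zipStream, 4 * 1024 * 1024, 5)

  // 3. Twirp call to finalize the new artifact upload
  await client.FinalizeArtifact({name, size})
}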