
[Artifacts] Save md5 hash for each artifact upload (#1494)

* Hash artifact upload using md5

* Add imports

* Small tweaks

* PR feedback

* PR Feedback
Konrad Pabjan 2023-08-15 13:39:57 -04:00 committed by GitHub
parent 45c49b09df
commit c9dab8c79d
5 changed files with 70 additions and 14 deletions
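
In short, the changes below tee the artifact's zip stream into two consumers: a PassThrough stream that feeds the existing blockBlobClient.uploadStream call, and a crypto MD5 Hash stream whose hex digest is then reported to FinalizeArtifact as md5:<hash>. A minimal, self-contained sketch of that tee-and-hash pattern using only Node's built-in stream and crypto modules; the stream names and the byte-counting "upload" are illustrative stand-ins, not toolkit APIs:

import * as crypto from 'crypto'
import * as stream from 'stream'

// `source` stands in for the artifact's ZipUploadStream; the byte counter
// below stands in for the real blob upload.
const source = stream.Readable.from([Buffer.from('hello '), Buffer.from('world')])

const uploadTee = new stream.PassThrough() // what the uploader would consume
const hashStream = crypto.createHash('md5') // computes the digest in parallel

source.pipe(uploadTee)
source.pipe(hashStream).setEncoding('hex')

async function main(): Promise<void> {
  // Stand-in for blockBlobClient.uploadStream(uploadTee, ...)
  let uploadedBytes = 0
  for await (const chunk of uploadTee) {
    uploadedBytes += (chunk as Buffer).length
  }

  // Same retrieval the diff uses: end the hash stream, then read the hex digest
  hashStream.end()
  const md5Hash = hashStream.read() as string

  console.log(`uploaded ${uploadedBytes} bytes, md5:${md5Hash}`)
}

void main()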

View File

@@ -14,6 +14,7 @@
         "@azure/storage-blob": "^12.15.0",
         "@protobuf-ts/plugin": "^2.2.3-alpha.1",
         "archiver": "^5.3.1",
+        "crypto": "^1.0.1",
         "jwt-decode": "^3.1.2",
         "twirp-ts": "^2.5.0"
       },
@@ -519,6 +520,12 @@
         "node": ">= 10"
       }
     },
+    "node_modules/crypto": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz",
+      "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==",
+      "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in."
+    },
     "node_modules/delayed-stream": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",

View File

@@ -44,6 +44,7 @@
     "@azure/storage-blob": "^12.15.0",
     "@protobuf-ts/plugin": "^2.2.3-alpha.1",
     "archiver": "^5.3.1",
+    "crypto": "^1.0.1",
     "jwt-decode": "^3.1.2",
     "twirp-ts": "^2.5.0"
   },

View File

@@ -3,6 +3,8 @@ import {TransferProgressEvent} from '@azure/core-http'
 import {ZipUploadStream} from './zip'
 import {getUploadChunkSize} from '../shared/config'
 import * as core from '@actions/core'
+import * as crypto from 'crypto'
+import * as stream from 'stream'

 export interface BlobUploadResponse {
   /**
@@ -14,6 +16,11 @@ export interface BlobUploadResponse {
    * The total reported upload size in bytes. Empty if the upload failed
    */
   uploadSize?: number
+
+  /**
+   * The MD5 hash of the uploaded file. Empty if the upload failed
+   */
+  md5Hash?: string
 }

 export async function uploadZipToBlobStorage(
@@ -41,15 +48,31 @@ export async function uploadZipToBlobStorage(
     onProgress: uploadCallback
   }

+  let md5Hash: string | undefined = undefined
+
+  const uploadStream = new stream.PassThrough()
+  const hashStream = crypto.createHash('md5')
+
+  zipUploadStream.pipe(uploadStream) // This stream is used for the upload
+  zipUploadStream.pipe(hashStream).setEncoding('hex') // This stream is used to compute a hash of the zip content that gets used. Integrity check
+
   try {
     core.info('Beginning upload of artifact content to blob storage')
     await blockBlobClient.uploadStream(
-      zipUploadStream,
+      uploadStream,
       bufferSize,
       maxBuffers,
       options
     )
     core.info('Finished uploading artifact content to blob storage!')
+
+    hashStream.end()
+    md5Hash = hashStream.read() as string
+    core.info(`MD5 hash of uploaded artifact zip is ${md5Hash}`)
   } catch (error) {
-    core.info(`Failed to upload artifact zip to blob storage, error: ${error}`)
+    core.warning(`Failed to upload artifact zip to blob storage, error: ${error}`)
     return {
       isSuccess: false
     }
@@ -62,12 +85,9 @@ export async function uploadZipToBlobStorage(
     }
   }

-  core.info(
-    `Successfully uploaded all artifact file content. Total reported size: ${uploadByteCount}`
-  )
-
   return {
     isSuccess: true,
-    uploadSize: uploadByteCount
+    uploadSize: uploadByteCount,
+    md5Hash: md5Hash
   }
 }
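
The hunk above retrieves the digest through the Hash object's stream interface: setEncoding('hex') on the readable side, then end() and read() once the upload has finished. For comparison, the same digest can be accumulated with the non-streaming update()/digest() API; the sketch below only illustrates that equivalence (the source stream is a hypothetical stand-in) and is not part of this change:

import * as crypto from 'crypto'
import * as stream from 'stream'

// Event-based equivalent of the piped hash above: feed each chunk to the Hash
// by hand and take the hex digest once the source ends.
function md5OfStream(source: stream.Readable): Promise<string> {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('md5')
    source.on('data', (chunk: Buffer) => hash.update(chunk))
    source.on('error', reject)
    source.on('end', () => resolve(hash.digest('hex')))
  })
}

// Usage sketch:
// md5OfStream(stream.Readable.from([Buffer.from('abc')])).then(md5 => console.log(md5))

The piped form used in the diff has the advantage that pipe() handles flow control between the zip stream, the hash, and the upload; the event-based form is shown only to make the Hash API explicit.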

View File

@@ -10,9 +10,13 @@ import {
   validateRootDirectory
 } from './upload-zip-specification'
 import {getBackendIdsFromToken} from '../shared/util'
-import {CreateArtifactRequest} from 'src/generated'
 import {uploadZipToBlobStorage} from './blob-upload'
 import {createZipUploadStream} from './zip'
+import {
+  CreateArtifactRequest,
+  FinalizeArtifactRequest,
+  StringValue
+} from '../../generated'

 export async function uploadArtifact(
   name: string,
@@ -39,7 +43,9 @@
   // get the IDs needed for the artifact creation
   const backendIds = getBackendIdsFromToken()
   if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
-    core.warning(`Failed to get backend ids`)
+    core.warning(
+      `Failed to get the necessary backend ids which are required to create the artifact`
+    )
     return {
       success: false
     }
@@ -77,7 +83,10 @@
   }

   // Upload zip to blob storage
-  const uploadResult = await uploadZipToBlobStorage(createArtifactResp.signedUploadUrl, zipUploadStream)
+  const uploadResult = await uploadZipToBlobStorage(
+    createArtifactResp.signedUploadUrl,
+    zipUploadStream
+  )
   if (uploadResult.isSuccess === false) {
     return {
       success: false
@@ -85,12 +94,24 @@
   }

   // finalize the artifact
-  const finalizeArtifactResp = await artifactClient.FinalizeArtifact({
+  const finalizeArtifactReq: FinalizeArtifactRequest = {
     workflowRunBackendId: backendIds.workflowRunBackendId,
     workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
     name: name,
     size: uploadResult.uploadSize!.toString()
-  })
+  }
+
+  if (uploadResult.md5Hash) {
+    finalizeArtifactReq.hash = StringValue.create({
+      value: `md5:${uploadResult.md5Hash!}`
+    })
+  }
+
+  core.info(`Finalizing artifact upload`)
+
+  const finalizeArtifactResp = await artifactClient.FinalizeArtifact(
+    finalizeArtifactReq
+  )
   if (!finalizeArtifactResp.ok) {
     core.warning(`Failed to finalize artifact`)
     return {
@@ -98,9 +119,14 @@
     }
   }

+  const artifactId = BigInt(finalizeArtifactResp.artifactId)
+  core.info(
+    `Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`
+  )
+
   return {
     success: true,
     size: uploadResult.uploadSize,
-    id: parseInt(finalizeArtifactResp.artifactId) // TODO - will this be a problem due to the id being a bigint?
+    id: Number(artifactId)
   }
 }
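
The removed TODO asked whether parsing the backend's artifactId could be lossy; the new code converts it via BigInt and then Number(artifactId). Number is only exact up to Number.MAX_SAFE_INTEGER (2 ** 53 - 1), so the conversion is safe for IDs below that bound. A hypothetical guard, not part of this change, that makes the assumption explicit:

// Hypothetical helper: convert a bigint ID to a JS number only when it is
// exactly representable; otherwise surface the problem instead of rounding.
function toSafeNumber(id: bigint): number {
  if (id > BigInt(Number.MAX_SAFE_INTEGER)) {
    throw new Error(`artifact id ${id} cannot be represented exactly as a number`)
  }
  return Number(id)
}

// Number(123n) === 123, but Number(9007199254740993n) rounds to 9007199254740992.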

View File

@@ -76,7 +76,9 @@ const zipErrorCallback = (error: any): void => {
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 const zipWarningCallback = (error: any): void => {
   if (error.code === 'ENOENT') {
-    core.warning('ENOENT warning during artifact zip creation. No such file or directory')
+    core.warning(
+      'ENOENT warning during artifact zip creation. No such file or directory'
+    )
     core.info(error)
   } else {
     core.warning(