
crc: update headers & digest stream

pull/1063/head
Rob Herley 2022-04-26 22:59:56 +00:00 committed by GitHub
parent fccc5ee6e6
commit 3f95e2ea4f
2 changed files with 6 additions and 10 deletions


@@ -97,7 +97,7 @@ describe('Utils', () => {
     expect(headers['x-tfs-filelength']).toEqual(uncompressedLength)
     expect(headers['Content-Length']).toEqual(size)
     expect(headers['Content-Range']).toEqual(range)
-    expect(headers['X-Digest']).toEqual(`sha-256=${digest}`)
+    expect(headers['X-GH-Actions-CRC64']).toEqual(digest)
   })

   it('Test constructing upload headers with only required parameter', () => {
@@ -230,8 +230,6 @@
     const stream = Readable.from(data)
     const digest = await utils.digestForStream(stream)
-    expect(digest).toBe(
-      '5e2bf57d3f40c4b6df69daf1936cb766f832374b4fc0259a7cbff06e2f70f269'
-    )
+    expect(digest).toBe('FFFCD6894DC82C6D')
   })
 })
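For reference, a minimal usage sketch of the updated digestForStream helper, mirroring the test above: after this change it resolves to an uppercase hex CRC64 string (e.g. 'FFFCD6894DC82C6D') rather than a sha256 hex digest. The import path and wrapper function below are illustrative assumptions, not code from this commit.

import {Readable} from 'stream'
import * as utils from './utils' // import path is an assumption for illustration

// Compute the CRC64 digest of an in-memory string the same way the test does.
async function crc64Of(data: string): Promise<string> {
  const stream = Readable.from(data)
  return utils.digestForStream(stream)
}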


@@ -1,4 +1,3 @@
-import * as crypto from 'crypto'
 import {promises as fs} from 'fs'
 import {IncomingHttpHeaders} from 'http'
 import {debug, info, warning} from '@actions/core'
@@ -12,6 +11,7 @@ import {
   getRetryMultiplier,
   getInitialRetryIntervalInMilliseconds
 } from './config-variables'
+import CRC64 from './crc64'

 /**
  * Returns a retry time in milliseconds that exponentially gets larger
@@ -205,8 +205,7 @@ export function getUploadHeaders(
     requestOptions['Content-Range'] = contentRange
   }
   if (digest) {
-    // TODO(robherley): should we use 'Digest' directly? https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Digest
-    requestOptions['X-Digest'] = `sha-256=${digest}`
+    requestOptions['X-GH-Actions-CRC64'] = digest
   }

   return requestOptions
@@ -302,10 +301,9 @@ export async function digestForStream(
   stream: NodeJS.ReadableStream
 ): Promise<string> {
   return new Promise((resolve, reject) => {
-    // TODO(robherley): switch to crc64 for production
-    const hasher = crypto.createHash('sha256')
+    const hasher = new CRC64()
     stream.on('data', data => hasher.update(data))
-    stream.on('end', () => resolve(hasher.digest('hex')))
+    stream.on('end', () => resolve(hasher.digest()))
     stream.on('error', reject)
   })
 }
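The './crc64' module itself is not part of this diff, so only the interface it must expose can be inferred from the code above: a zero-argument constructor, update() called once per stream chunk, and digest() returning the uppercase hex string the updated test expects. Below is a minimal stand-in sketch of that interface using a bit-by-bit CRC-64 (ECMA-182 polynomial, zero initial value, no reflection); the polynomial and CRC parameters are assumptions and may differ from the repository's actual implementation.

// crc64 sketch: interface inferred from digestForStream above; the CRC
// parameters (polynomial, init, reflection) are assumptions, not the
// repository's actual './crc64' implementation.
const POLY = 0x42f0e1eba9ea3693n // CRC-64/ECMA-182 polynomial (assumed)
const MASK = 0xffffffffffffffffn

export default class CRC64 {
  private crc = 0n

  // Fold one stream chunk into the running CRC, most-significant-bit first.
  update(data: Buffer | string): void {
    const buf = typeof data === 'string' ? Buffer.from(data) : data
    for (const byte of buf) {
      this.crc ^= (BigInt(byte) << 56n) & MASK
      for (let i = 0; i < 8; i++) {
        this.crc =
          (this.crc & 0x8000000000000000n) !== 0n
            ? ((this.crc << 1n) ^ POLY) & MASK
            : (this.crc << 1n) & MASK
      }
    }
  }

  // Return the digest as a 16-character uppercase hex string, the format
  // asserted in the updated test ('FFFCD6894DC82C6D').
  digest(): string {
    return this.crc.toString(16).toUpperCase().padStart(16, '0')
  }
}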