mirror of https://github.com/actions/toolkit
crc: update headers & digest stream
parent fccc5ee6e6
commit 3f95e2ea4f
@@ -97,7 +97,7 @@ describe('Utils', () => {
     expect(headers['x-tfs-filelength']).toEqual(uncompressedLength)
     expect(headers['Content-Length']).toEqual(size)
     expect(headers['Content-Range']).toEqual(range)
-    expect(headers['X-Digest']).toEqual(`sha-256=${digest}`)
+    expect(headers['X-GH-Actions-CRC64']).toEqual(digest)
   })

   it('Test constructing upload headers with only required parameter', () => {
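Note: the only behavioural change in this test is the digest header. A before/after sketch of the header object the assertions above imply, with placeholder values standing in for the real length, size, range and digests:

    // Illustrative shapes only; every value below is a placeholder, not real data.
    const headersBefore = {
      'x-tfs-filelength': 256,
      'Content-Length': 128,
      'Content-Range': 'bytes 0-127/256',
      'X-Digest': 'sha-256=<sha-256 hex of the chunk>' // old scheme: algorithm-prefixed SHA-256
    }
    const headersAfter = {
      'x-tfs-filelength': 256,
      'Content-Length': 128,
      'Content-Range': 'bytes 0-127/256',
      'X-GH-Actions-CRC64': '<CRC64 hex of the chunk>' // new scheme: bare CRC64 hex, no prefix
    }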
@@ -230,8 +230,6 @@ describe('Utils', () => {
     const stream = Readable.from(data)
     const digest = await utils.digestForStream(stream)

-    expect(digest).toBe(
-      '5e2bf57d3f40c4b6df69daf1936cb766f832374b4fc0259a7cbff06e2f70f269'
-    )
+    expect(digest).toBe('FFFCD6894DC82C6D')
   })
 })
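Note: a minimal usage sketch of the updated digestForStream, assuming the same utils module the test above imports; the input payload (and therefore the digest it prints) is illustrative, not the fixture the test uses:

    import {Readable} from 'stream'
    import * as utils from './utils' // import path assumed for this sketch

    async function printDigest(): Promise<void> {
      // Any readable stream works; wrapping the data in an array yields it as one chunk.
      const stream = Readable.from(['illustrative payload'])
      const digest = await utils.digestForStream(stream)
      // With the CRC64 hasher this is a 16-character hex string, like the
      // 'FFFCD6894DC82C6D' expected above, rather than 64 characters of SHA-256.
      console.log(digest)
    }

    void printDigest()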
@@ -1,4 +1,3 @@
-import * as crypto from 'crypto'
 import {promises as fs} from 'fs'
 import {IncomingHttpHeaders} from 'http'
 import {debug, info, warning} from '@actions/core'
@@ -12,6 +11,7 @@ import {
   getRetryMultiplier,
   getInitialRetryIntervalInMilliseconds
 } from './config-variables'
+import CRC64 from './crc64'

 /**
  * Returns a retry time in milliseconds that exponentially gets larger
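Note: the diff shows only how ./crc64 is consumed: constructed with new CRC64(), fed with update(), and read with digest(). A sketch of the minimal shape that usage implies, written as a declaration rather than an implementation; the real module may well expose more (encodings, Buffer output, etc.):

    // crc64.d.ts (assumed surface, inferred from this diff alone)
    declare class CRC64 {
      // Accepts the chunks a readable stream emits.
      update(data: Buffer | string): void
      // Returns the checksum as a hex string, e.g. 'FFFCD6894DC82C6D' in the test above.
      digest(): string
    }
    export default CRC64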
@@ -205,8 +205,7 @@ export function getUploadHeaders(
     requestOptions['Content-Range'] = contentRange
   }
   if (digest) {
-    // TODO(robherley): should we use 'Digest' directly? https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Digest
-    requestOptions['X-Digest'] = `sha-256=${digest}`
+    requestOptions['X-GH-Actions-CRC64'] = digest
   }

   return requestOptions
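Note: getUploadHeaders attaches the header only when a digest is supplied, and Node's HTTP parser lower-cases incoming header names. A small sketch of reading the new header back out of IncomingHttpHeaders (which this file already imports); verifyCrc64Header is a hypothetical helper, not part of this commit:

    import {IncomingHttpHeaders} from 'http'

    // Hypothetical helper: compare the digest the sender advertised with one
    // recomputed from the body that actually arrived.
    export function verifyCrc64Header(headers: IncomingHttpHeaders, computed: string): boolean {
      // Sent as 'X-GH-Actions-CRC64'; read back lower-cased on the incoming side.
      const advertised = headers['x-gh-actions-crc64']
      return typeof advertised === 'string' && advertised === computed
    }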
@@ -302,10 +301,9 @@ export async function digestForStream(
   stream: NodeJS.ReadableStream
 ): Promise<string> {
   return new Promise((resolve, reject) => {
-    // TODO(robherley): switch to crc64 for production
-    const hasher = crypto.createHash('sha256')
+    const hasher = new CRC64()
     stream.on('data', data => hasher.update(data))
-    stream.on('end', () => resolve(hasher.digest('hex')))
+    stream.on('end', () => resolve(hasher.digest()))
     stream.on('error', reject)
   })
 }
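Note: taken together, the two source hunks mean a caller hashes the chunk first and then threads the result into the upload headers. A rough end-to-end sketch under that assumption; sendChunk and its bare header object are illustrative stand-ins, not the real upload client in this package:

    import {Readable} from 'stream'
    import {digestForStream} from './utils' // import path assumed for this sketch

    // Illustrative flow only: compute the CRC64 of a chunk, then attach it so the
    // server can verify integrity via the X-GH-Actions-CRC64 header.
    export async function sendChunk(chunk: Buffer): Promise<void> {
      // digestForStream consumes its stream, so hash one stream here and build
      // a fresh one later for the actual upload.
      const digest = await digestForStream(Readable.from([chunk]))
      const headers: {[header: string]: string} = {
        'Content-Length': chunk.length.toString(),
        'X-GH-Actions-CRC64': digest
      }
      // ...hand `headers` and Readable.from([chunk]) to the HTTP client here...
      void headers
    }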