import * as fs from 'fs'
import * as io from '../../io/src/io'
import * as path from 'path'
import * as utils from '../src/internal/utils'
import * as core from '@actions/core'
import {HttpCodes} from '@actions/http-client'
import {
  getRuntimeUrl,
  getWorkFlowRunId,
  getInitialRetryIntervalInMilliseconds,
  getRetryMultiplier
} from '../src/internal/config-variables'
import {Readable} from 'stream'

jest.mock('../src/internal/config-variables')
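
// config-variables is mocked so the tests control the runtime URL, run id,
// retry interval and retry multiplier used below, rather than whatever the
// real module would read from the runner environment.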
describe('Utils', () => {
  beforeAll(() => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
  })
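
  // getExponentialRetryTimeInMilliseconds(0) should return the initial
  // interval unchanged, and for retry attempt n >= 1 the backoff should land
  // in [initial * multiplier * n, initial * multiplier^2 * n), which is what
  // testMinMaxRange asserts. For example, with a 3000 ms initial interval and
  // a multiplier of 1.5 (illustrative values only; the real ones come from
  // the mocked config-variables module), retry 1 would fall in [4500, 6750).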
  it('Check exponential retry range', () => {
    // No retries should return the initial retry interval
    const retryWaitTime0 = utils.getExponentialRetryTimeInMilliseconds(0)
    expect(retryWaitTime0).toEqual(getInitialRetryIntervalInMilliseconds())

    const testMinMaxRange = (retryCount: number): void => {
      const retryWaitTime = utils.getExponentialRetryTimeInMilliseconds(
        retryCount
      )
      const minRange =
        getInitialRetryIntervalInMilliseconds() *
        getRetryMultiplier() *
        retryCount
      const maxRange = minRange * getRetryMultiplier()

      expect(retryWaitTime).toBeGreaterThanOrEqual(minRange)
      expect(retryWaitTime).toBeLessThan(maxRange)
    }

    for (let i = 1; i < 10; i++) {
      testMinMaxRange(i)
    }
  })
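
  // getProperRetention resolves the retention period for an artifact:
  // a negative input throws, and the second argument (the maximum retention
  // setting, passed as a string) caps whatever the caller requested, as the
  // three tests below exercise.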
  it('Test negative artifact retention throws', () => {
    expect(() => {
      utils.getProperRetention(-1, undefined)
    }).toThrow()
  })

  it('Test no setting specified takes artifact retention input', () => {
    expect(utils.getProperRetention(180, undefined)).toEqual(180)
  })

  it('Test artifact retention must conform to max allowed', () => {
    expect(utils.getProperRetention(180, '45')).toEqual(45)
  })
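
  // The artifact URL is assembled from the runtime URL and workflow run id
  // supplied by the mocked config-variables module.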
  it('Test constructing artifact URL', () => {
    const runtimeUrl = getRuntimeUrl()
    const runId = getWorkFlowRunId()
    const artifactUrl = utils.getArtifactUrl()
    expect(artifactUrl).toEqual(
      `${runtimeUrl}_apis/pipelines/workflows/${runId}/artifacts?api-version=${utils.getApiVersion()}`
    )
  })
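
  // getUploadHeaders is exercised with every optional parameter set; judging
  // by the assertions that follow, the two boolean flags toggle the
  // Keep-Alive and gzip Content-Encoding headers, and the digest adds the
  // CRC64/MD5 result headers.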
  it('Test constructing upload headers with all optional parameters', () => {
    const contentType = 'application/octet-stream'
    const size = 24
    const uncompressedLength = 100
    const range = 'bytes 0-199/200'
    const digest = {
      crc64: 'bSzITYnW/P8=',
      md5: 'Xiv1fT9AxLbfadrxk2y3ZvgyN0tPwCWafL/wbi9w8mk='
    }
    const headers = utils.getUploadHeaders(
      contentType,
      true,
      true,
      uncompressedLength,
      size,
      range,
      digest
    )
    expect(Object.keys(headers).length).toEqual(10)
    expect(headers['Accept']).toEqual(
      `application/json;api-version=${utils.getApiVersion()}`
    )
    expect(headers['Content-Type']).toEqual(contentType)
    expect(headers['Connection']).toEqual('Keep-Alive')
    expect(headers['Keep-Alive']).toEqual('10')
    expect(headers['Content-Encoding']).toEqual('gzip')
    expect(headers['x-tfs-filelength']).toEqual(uncompressedLength)
    expect(headers['Content-Length']).toEqual(size)
    expect(headers['Content-Range']).toEqual(range)
    expect(headers['x-actions-results-crc64']).toEqual(digest.crc64)
    expect(headers['x-actions-results-md5']).toEqual(digest.md5)
  })

  it('Test constructing upload headers with only required parameter', () => {
    const headers = utils.getUploadHeaders('application/octet-stream')
    expect(Object.keys(headers).length).toEqual(2)
    expect(headers['Accept']).toEqual(
      `application/json;api-version=${utils.getApiVersion()}`
    )
    expect(headers['Content-Type']).toEqual('application/octet-stream')
  })
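
  // The download headers mirror the upload headers, with gzip requested via
  // Accept-Encoding; the two tests below cover the fully specified case and
  // the default Accept type.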
  it('Test constructing download headers with all optional parameters', () => {
    const contentType = 'application/json'
    const headers = utils.getDownloadHeaders(contentType, true, true)
    expect(Object.keys(headers).length).toEqual(5)
    expect(headers['Content-Type']).toEqual(contentType)
    expect(headers['Connection']).toEqual('Keep-Alive')
    expect(headers['Keep-Alive']).toEqual('10')
    expect(headers['Accept-Encoding']).toEqual('gzip')
    expect(headers['Accept']).toEqual(
      `application/octet-stream;api-version=${utils.getApiVersion()}`
    )
  })

  it('Test constructing download headers with only required parameter', () => {
    const headers = utils.getDownloadHeaders('application/octet-stream')
    expect(Object.keys(headers).length).toEqual(2)
    expect(headers['Content-Type']).toEqual('application/octet-stream')
    // check for default accept type
    expect(headers['Accept']).toEqual(
      `application/json;api-version=${utils.getApiVersion()}`
    )
  })
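
  // Status-code helpers: any 2xx code counts as success, while the retryable
  // set covered below includes 502/503/504, 429 (throttling) and
  // 413 (Payload Too Large).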
  it('Test Success Status Code', () => {
    expect(utils.isSuccessStatusCode(HttpCodes.OK)).toEqual(true)
    expect(utils.isSuccessStatusCode(201)).toEqual(true)
    expect(utils.isSuccessStatusCode(299)).toEqual(true)
    expect(utils.isSuccessStatusCode(HttpCodes.NotFound)).toEqual(false)
    expect(utils.isSuccessStatusCode(HttpCodes.BadGateway)).toEqual(false)
    expect(utils.isSuccessStatusCode(HttpCodes.Forbidden)).toEqual(false)
  })

  it('Test Retry Status Code', () => {
    expect(utils.isRetryableStatusCode(HttpCodes.BadGateway)).toEqual(true)
    expect(utils.isRetryableStatusCode(HttpCodes.ServiceUnavailable)).toEqual(
      true
    )
    expect(utils.isRetryableStatusCode(HttpCodes.GatewayTimeout)).toEqual(true)
    expect(utils.isRetryableStatusCode(HttpCodes.TooManyRequests)).toEqual(true)
    expect(utils.isRetryableStatusCode(HttpCodes.OK)).toEqual(false)
    expect(utils.isRetryableStatusCode(HttpCodes.NotFound)).toEqual(false)
    expect(utils.isRetryableStatusCode(HttpCodes.Forbidden)).toEqual(false)
    expect(utils.isRetryableStatusCode(413)).toEqual(true) // Payload Too Large
  })

  it('Test Throttled Status Code', () => {
    expect(utils.isThrottledStatusCode(HttpCodes.TooManyRequests)).toEqual(true)
    expect(utils.isThrottledStatusCode(HttpCodes.InternalServerError)).toEqual(
      false
    )
    expect(utils.isThrottledStatusCode(HttpCodes.BadGateway)).toEqual(false)
    expect(utils.isThrottledStatusCode(HttpCodes.ServiceUnavailable)).toEqual(
      false
    )
  })

  it('Test Forbidden Status Code', () => {
    expect(utils.isForbiddenStatusCode(HttpCodes.Forbidden)).toEqual(true)
    expect(utils.isForbiddenStatusCode(HttpCodes.InternalServerError)).toEqual(
      false
    )
    expect(utils.isForbiddenStatusCode(HttpCodes.TooManyRequests)).toEqual(
      false
    )
    expect(utils.isForbiddenStatusCode(HttpCodes.OK)).toEqual(false)
  })
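
  // The filesystem tests below run against a scratch _temp directory next to
  // this test file; fs.promises.access resolves with undefined when a path
  // exists and rejects when it does not.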
  it('Test Creating Artifact Directories', async () => {
    const root = path.join(__dirname, '_temp', 'artifact-download')
    // remove directory before starting
    await io.rmRF(root)

    const directory1 = path.join(root, 'folder2', 'folder3')
    const directory2 = path.join(directory1, 'folder1')

    // Initially should not exist
    await expect(fs.promises.access(directory1)).rejects.not.toBeUndefined()
    await expect(fs.promises.access(directory2)).rejects.not.toBeUndefined()
    const directoryStructure = [directory1, directory2]
    await utils.createDirectoriesForArtifact(directoryStructure)
    // directories should now be created
    await expect(fs.promises.access(directory1)).resolves.toEqual(undefined)
    await expect(fs.promises.access(directory2)).resolves.toEqual(undefined)
  })

  it('Test Creating Empty Files', async () => {
    const root = path.join(__dirname, '_temp', 'empty-files')
    await io.rmRF(root)

    const emptyFile1 = path.join(root, 'emptyFile1')
    const directoryToCreate = path.join(root, 'folder1')
    const emptyFile2 = path.join(directoryToCreate, 'emptyFile2')

    // empty files should only be created after the directory structure is fully set up,
    // so ensure the directories are created first via createDirectoriesForArtifact
    const directoryStructure = [root, directoryToCreate]
    await utils.createDirectoriesForArtifact(directoryStructure)
    await expect(fs.promises.access(root)).resolves.toEqual(undefined)
    await expect(fs.promises.access(directoryToCreate)).resolves.toEqual(
      undefined
    )

    await expect(fs.promises.access(emptyFile1)).rejects.not.toBeUndefined()
    await expect(fs.promises.access(emptyFile2)).rejects.not.toBeUndefined()

    const emptyFilesToCreate = [emptyFile1, emptyFile2]
    await utils.createEmptyFilesForArtifact(emptyFilesToCreate)

    await expect(fs.promises.access(emptyFile1)).resolves.toEqual(undefined)
    const size1 = (await fs.promises.stat(emptyFile1)).size
    expect(size1).toEqual(0)
    await expect(fs.promises.access(emptyFile2)).resolves.toEqual(undefined)
    const size2 = (await fs.promises.stat(emptyFile2)).size
    expect(size2).toEqual(0)
  })
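
  // digestForStream consumes the stream and returns base64-encoded CRC64 and
  // MD5 digests of its contents, compared here against known values for
  // 'lorem ipsum'.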
  it('Creates a digest from a readable stream', async () => {
    const data = 'lorem ipsum'
    const stream = Readable.from(data)
    const digest = await utils.digestForStream(stream)

    expect(digest.crc64).toBe('bSzITYnW/P8=')
    expect(digest.md5).toBe('gKdR/eV3AoZAxBkADjPrpg==')
  })
})