Mirror of https://github.com/actions/toolkit (synced 2025-05-10 09:03:02 +00:00)
Tests for artifact uploads
This commit is contained in:
parent 206233ad77 · commit e6f55916d2

6 changed files with 272 additions and 38 deletions
@@ -106,9 +106,8 @@ describe('Search', () => {
   })
 
   /**
-   * Creates a single item in <TempDir>/single-file-artifact/folder-a/folder-b/folder-b/file-under-c.txt
-   * Expected to find that item with full file path provided
-   * Only 1 item is expected to be found so the uploadFilePath is expected to be {artifactName}/file-under-c.txt
+   * Expected to find one item with full file path provided
+   * Only 1 item is expected to be found so the uploadFilePath is expected to be {artifactName}/extra-file-in-folder-c.txt
    */
   it('Single file search - full path', async () => {
     const expectedUploadFilePath = path.join(
@@ -135,9 +134,8 @@ describe('Search', () => {
   })
 
   /**
-   * Creates a single item in <TempDir>/single-file-artifact/folder-a/folder-b/folder-b/file-under-c.txt
-   * Expected to find that one item with a provided wildcard pattern
-   * Only 1 item is expected to be found so the uploadFilePath is expected to be {artifactName}/file-under-c.txt
+   * Expected to find one item with the provided wildcard pattern
+   * Only 1 item is expected to be found so the uploadFilePath is expected to be {artifactName}/good-item1.txt
    */
   it('Single file search - wildcard pattern', async () => {
     const searchPath = path.join(root, '**/good*m1.txt')
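Note: the renamed expectation above works because the pattern '**/good*m1.txt' matches good-item1.txt — '**/' matches any directory depth and '*' consumes '-ite'. A minimal standalone sketch of that match (using the minimatch package as the glob engine is an assumption here; the package's actual search implementation may use a different engine):

import {minimatch} from 'minimatch' // named export in minimatch v6+

// '**/good*m1.txt' = any directories, then a name starting 'good' and ending 'm1.txt'
console.log(minimatch('folder-a/good-item1.txt', '**/good*m1.txt')) // true
console.log(minimatch('folder-a/good-item2.txt', '**/good*m1.txt')) // false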
@@ -1,24 +1,78 @@
+import * as http from 'http'
+import * as io from '../../io/src/io'
+import * as net from 'net'
 import * as path from 'path'
 import * as uploadHttpClient from '../src/upload-artifact-http-client'
-import {getRuntimeUrl} from '../src/env-variables'
+import {promises as fs} from 'fs'
+import {getRuntimeUrl} from '../src/config-variables'
 import {HttpClient, HttpClientResponse} from '@actions/http-client/index'
 import {
   CreateArtifactResponse,
   PatchArtifactSizeSuccessResponse
 } from '../src/contracts'
 import {SearchResult} from '../src/search'
 
+const root = path.join(__dirname, '_temp', 'artifact-upload')
+const file1Path = path.join(root, 'file1.txt')
+const file2Path = path.join(root, 'file2.txt')
+const file3Path = path.join(root, 'folder1', 'file3.txt')
+const file4Path = path.join(root, 'folder1', 'file4.txt')
+const file5Path = path.join(root, 'folder1', 'folder2', 'folder3', 'file5.txt')
+
+let file1Size = 0
+let file2Size = 0
+let file3Size = 0
+let file4Size = 0
+let file5Size = 0
+
 // mock env variables that will not always be available along with certain http methods
-jest.mock('../src/env-variables')
+jest.mock('../src/config-variables')
 jest.mock('@actions/http-client')
 
 describe('Upload Tests', () => {
-  // setup mocking for HTTP calls
-  beforeAll(() => {
-    mockHttpPostCall()
-    mockHttpPatchCall()
+  // setup mocking for HTTP calls and prepare some test files for mock uploading
+  beforeAll(async () => {
+    mockHttpPost()
+    mockHttpSendStream()
+    mockHttpPatch()
+
+    // clear temp directory and create files that will be "uploaded"
+    await io.rmRF(root)
+    await fs.mkdir(path.join(root, 'folder1', 'folder2', 'folder3'), {
+      recursive: true
+    })
+    await fs.writeFile(file1Path, 'this is file 1')
+    await fs.writeFile(file2Path, 'this is file 2')
+    await fs.writeFile(file3Path, 'this is file 3')
+    await fs.writeFile(file4Path, 'this is file 4')
+    await fs.writeFile(file5Path, 'this is file 5')
+    /*
+      Directory structure for files that were created:
+      root/
+        file1.txt
+        file2.txt
+        folder1/
+          file3.txt
+          file4.txt
+          folder2/
+            folder3/
+              file5.txt
+    */
+
+    file1Size = (await fs.stat(file1Path)).size
+    file2Size = (await fs.stat(file2Path)).size
+    file3Size = (await fs.stat(file3Path)).size
+    file4Size = (await fs.stat(file4Path)).size
+    file5Size = (await fs.stat(file5Path)).size
   })
 
+  afterAll(async () => {
+    await io.rmRF(root)
+  })
+
+  /**
+   * Artifact Creation Tests
+   */
   it('Create Artifact - Success', async () => {
     const artifactName = 'valid-artifact-name'
     const response = await uploadHttpClient.createArtifactInFileContainer(
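A small arithmetic note on the fixture files created above: each one holds the 14-byte ASCII string 'this is file N', so every fileNSize resolves to 14 and the size assertions in the tests below are easy to verify by hand (fs.writeFile adds no BOM or trailing newline):

// For ASCII, bytes === characters:
console.log(Buffer.byteLength('this is file 1')) // 14
// So the all-files-succeed case below expects 5 * 14 = 70 bytes in total.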
@@ -48,6 +102,141 @@ describe('Upload Tests', () => {
     )
   })
 
+  /**
+   * Artifact Upload Tests
+   */
+  it('Upload Artifact - Success', async () => {
+    /**
+     * Normally search.findFilesToUpload() would be used for providing information about what to upload. These tests,
+     * however, focus solely on the upload APIs, so searchResult[] will be hard-coded
+     */
+    const artifactName = 'successful-artifact'
+    const searchResult: SearchResult[] = [
+      {
+        absoluteFilePath: file1Path,
+        uploadFilePath: `${artifactName}/file1.txt`
+      },
+      {
+        absoluteFilePath: file2Path,
+        uploadFilePath: `${artifactName}/file2.txt`
+      },
+      {
+        absoluteFilePath: file3Path,
+        uploadFilePath: `${artifactName}/folder1/file3.txt`
+      },
+      {
+        absoluteFilePath: file4Path,
+        uploadFilePath: `${artifactName}/folder1/file4.txt`
+      },
+      {
+        absoluteFilePath: file5Path,
+        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
+      }
+    ]
+
+    const expectedTotalSize =
+      file1Size + file2Size + file3Size + file4Size + file5Size
+    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
+    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
+      uploadUrl,
+      searchResult
+    )
+    expect(uploadResult.failedItems.length).toEqual(0)
+    expect(uploadResult.size).toEqual(expectedTotalSize)
+  })
+
+  it('Upload Artifact - Failed Single File Upload', async () => {
+    const searchResult: SearchResult[] = [
+      {
+        absoluteFilePath: file1Path,
+        uploadFilePath: `this-file-upload-will-fail`
+      }
+    ]
+
+    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
+    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
+      uploadUrl,
+      searchResult
+    )
+    expect(uploadResult.failedItems.length).toEqual(1)
+    expect(uploadResult.size).toEqual(0)
+  })
+
+  it('Upload Artifact - Partial Upload Continue On Error', async () => {
+    const artifactName = 'partial-artifact'
+    const searchResult: SearchResult[] = [
+      {
+        absoluteFilePath: file1Path,
+        uploadFilePath: `${artifactName}/file1.txt`
+      },
+      {
+        absoluteFilePath: file2Path,
+        uploadFilePath: `${artifactName}/file2.txt`
+      },
+      {
+        absoluteFilePath: file3Path,
+        uploadFilePath: `${artifactName}/folder1/file3.txt`
+      },
+      {
+        absoluteFilePath: file4Path,
+        uploadFilePath: `this-file-upload-will-fail`
+      },
+      {
+        absoluteFilePath: file5Path,
+        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
+      }
+    ]
+
+    const expectedPartialSize = file1Size + file2Size + file4Size + file5Size
+    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
+    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
+      uploadUrl,
+      searchResult,
+      {continueOnError: true}
+    )
+    expect(uploadResult.failedItems.length).toEqual(1)
+    expect(uploadResult.size).toEqual(expectedPartialSize)
+  })
+
+  it('Upload Artifact - Partial Upload Fail Fast', async () => {
+    const artifactName = 'partial-artifact'
+    const searchResult: SearchResult[] = [
+      {
+        absoluteFilePath: file1Path,
+        uploadFilePath: `${artifactName}/file1.txt`
+      },
+      {
+        absoluteFilePath: file2Path,
+        uploadFilePath: `${artifactName}/file2.txt`
+      },
+      {
+        absoluteFilePath: file3Path,
+        uploadFilePath: `${artifactName}/folder1/file3.txt`
+      },
+      {
+        absoluteFilePath: file4Path,
+        uploadFilePath: `this-file-upload-will-fail`
+      },
+      {
+        absoluteFilePath: file5Path,
+        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
+      }
+    ]
+
+    const expectedPartialSize = file1Size + file2Size + file3Size
+    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
+    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
+      uploadUrl,
+      searchResult,
+      {continueOnError: false}
+    )
+    expect(uploadResult.failedItems.length).toEqual(2)
+    expect(uploadResult.size).toEqual(expectedPartialSize)
+  })
+
+  /**
+   * Artifact Association Tests
+   */
   it('Associate Artifact - Success', async () => {
     expect(async () => {
       uploadHttpClient.patchArtifactSize(130, 'my-artifact')
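Note: the two partial-upload tests above differ only in the continueOnError option, and their assertions spell out its contract. The 'this-file-upload-will-fail' paths trigger failures because the sendStream mock below returns a 500 for any request URL containing 'fail'. Since every fixture file is the same 14 bytes, the expected sizes reduce to:

// Option                    failedItems.length        expected size
// {continueOnError: true}   1 (the bad item)          4 files' worth = 4 * 14 = 56 bytes
// {continueOnError: false}  2 (bad item + abandoned)  3 files' worth = 3 * 14 = 42 bytes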
@@ -68,16 +257,16 @@ describe('Upload Tests', () => {
     ).rejects.toThrow('Unable to finish uploading artifact my-artifact')
   })
 
   /**
    * Helpers used to setup mocking all the required HTTP calls
    */
   async function mockReadBodyEmpty(): Promise<string> {
     return new Promise(resolve => {
       resolve()
     })
   }
 
   /**
    * Mocks http post calls that are made when first creating a container for an artifact
    */
-  function mockHttpPostCall(): void {
+  function mockHttpPost(): void {
     // eslint-disable-next-line @typescript-eslint/unbound-method
     HttpClient.prototype.post = async (
       requestdata,
@@ -119,14 +308,35 @@ describe('Upload Tests', () => {
     }
   }
 
   /**
    * Mocks http patch calls that are made at the very end of the artifact upload process to update the size
    */
-  function mockHttpPatchCall(): void {
+  function mockHttpSendStream(): void {
+    // eslint-disable-next-line @typescript-eslint/unbound-method
+    HttpClient.prototype.sendStream = jest.fn(
+      async (verb, requestUrl, stream) => {
+        const mockMessage = new http.IncomingMessage(new net.Socket())
+        mockMessage.statusCode = 200
+        if (!stream.readable) {
+          throw new Error('Unable to read provided stream')
+        }
+        if (requestUrl.includes('fail')) {
+          mockMessage.statusCode = 500
+        }
+        return new Promise<HttpClientResponse>(resolve => {
+          resolve({
+            message: mockMessage,
+            readBody: mockReadBodyEmpty
+          })
+        })
+      }
+    )
+  }
+
+  function mockHttpPatch(): void {
     // eslint-disable-next-line @typescript-eslint/unbound-method
     HttpClient.prototype.patch = jest.fn(async (requestdata, data) => {
       const inputData = JSON.parse(data)
       const mockMessage = new http.IncomingMessage(new net.Socket())
 
       // Get the name from the end of requestdata. Will be something like https://www.example.com/_apis/pipelines/workflows/15/artifacts?api-version=6.0-preview&artifactName=my-artifact
       const artifactName = requestdata.split('=')[2]
       let mockReadBody = mockReadBodyEmpty
       if (inputData.Size < 1) {
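A quick worked example of the artifactName extraction in the patch mock above, using the URL shape from its comment:

const requestdata =
  'https://www.example.com/_apis/pipelines/workflows/15/artifacts?api-version=6.0-preview&artifactName=my-artifact'
// Splitting on '=' yields three pieces; index 2 is everything after the second '=':
// [0] 'https://www.example.com/_apis/pipelines/workflows/15/artifacts?api-version'
// [1] '6.0-preview&artifactName'
// [2] 'my-artifact'
const artifactName = requestdata.split('=')[2] // 'my-artifact'

This only works because the mocked URL always has exactly two '=' characters with the artifact name last — a test shortcut, not general query-string parsing.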
@@ -1,3 +1,17 @@
+/**
+ * Mocks default limits for easier testing
+ */
+export function getUploadFileConcurrency(): number {
+  return 1
+}
+
+export function getUploadChunkConcurrency(): number {
+  return 1
+}
+
+export function getUploadChunkSize(): number {
+  return 4 * 1024 * 1024 // 4 MB Chunks
+}
 /**
  * Mocks the 'ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL' and 'GITHUB_RUN_ID' env variables
  * that are only available from a node context on the runner. This allows for tests to run
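For context on how this mock gets picked up: jest.mock('../src/config-variables') in the test file substitutes a manual mock for the real module. Under Jest's standard resolution that means a file in a __mocks__ directory adjacent to the mocked module; the exact layout below is an assumption, since the diff view does not show file paths:

packages/artifact/
  src/
    config-variables.ts        // real module: concurrency 2/3, 4 MB chunks
    __mocks__/
      config-variables.ts      // this mock: concurrency 1/1, 4 MB chunks
  __tests__/
    upload.test.ts             // calls jest.mock('../src/config-variables')

Dropping both concurrency limits to 1 in the mock helps keep the mocked uploads deterministic, which is presumably what lets the partial-upload tests assert exact failure counts.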
@@ -1,3 +1,15 @@
+export function getUploadFileConcurrency(): number {
+  return 2
+}
+
+export function getUploadChunkConcurrency(): number {
+  return 3
+}
+
+export function getUploadChunkSize(): number {
+  return 4 * 1024 * 1024 // 4 MB Chunks
+}
+
 export function getRuntimeToken(): string {
   const token = process.env['ACTIONS_RUNTIME_TOKEN']
   if (!token) {
@@ -18,13 +18,16 @@ import {
   getContentRange,
   getRequestOptions,
   isRetryableStatusCode,
-  isSuccessStatusCode,
-  parseEnvNumber
+  isSuccessStatusCode
 } from './utils'
-import {getRuntimeToken, getRuntimeUrl, getWorkFlowRunId} from './env-variables'
-
-const defaultChunkUploadConcurrency = 3
-const defaultFileUploadConcurrency = 2
+import {
+  getRuntimeToken,
+  getRuntimeUrl,
+  getWorkFlowRunId,
+  getUploadChunkConcurrency,
+  getUploadChunkSize,
+  getUploadFileConcurrency
+} from './config-variables'
 
 /**
  * Creates a file container for the new artifact in the remote blob storage/file service
@@ -73,21 +76,18 @@ export async function uploadArtifactToFileContainer(
   options?: UploadOptions
 ): Promise<UploadResults> {
   const client = createHttpClient(getRuntimeToken())
-
-  const FILE_CONCURRENCY =
-    parseEnvNumber('ARTIFACT_FILE_UPLOAD_CONCURRENCY') ||
-    defaultFileUploadConcurrency
-  const CHUNK_CONCURRENCY =
-    parseEnvNumber('ARTIFACT_CHUNK_UPLOAD_CONCURRENCY') ||
-    defaultChunkUploadConcurrency
-  const MAX_CHUNK_SIZE =
-    parseEnvNumber('ARTIFACT_UPLOAD_CHUNK_SIZE') || 4 * 1024 * 1024 // 4 MB Chunks
+  const FILE_CONCURRENCY = getUploadFileConcurrency()
+  const CHUNK_CONCURRENCY = getUploadChunkConcurrency()
+  const MAX_CHUNK_SIZE = getUploadChunkSize()
   debug(
     `File Concurrency: ${FILE_CONCURRENCY}, Chunk Concurrency: ${CHUNK_CONCURRENCY} and Chunk Size: ${MAX_CHUNK_SIZE}`
   )
 
   const parameters: UploadFileParameters[] = []
-  const continueOnError = options?.continueOnError || true
+  let continueOnError = true
+  if (options) {
+    continueOnError = options.continueOnError
+  }
 
   // Prepare the necessary parameters to upload all the files
   for (const file of filesToUpload) {
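The continueOnError rewrite in this hunk also fixes a subtle default-value bug worth spelling out:

// Old: const continueOnError = options?.continueOnError || true
// A caller passing {continueOnError: false} got `false || true`, which is
// true, so the caller's explicit false was silently ignored.
let continueOnError = true
if (options) {
  continueOnError = options.continueOnError
}
// An equivalent modern spelling would be `options?.continueOnError ?? true`,
// since nullish coalescing only falls back on null/undefined rather than on
// any falsy value; this commit uses the explicit if-statement instead.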
@@ -190,12 +190,12 @@ async function uploadFileAsync(
       end,
       fileSize
     )
 
     if (!result) {
       /**
        * Chunk failed to upload, report as failed but continue if desired. It is possible that part of a chunk was
        * successfully uploaded so the server may report a different size for what was uploaded
        **/
 
       isUploadSuccessful = false
       failedChunkSizes += chunkSize
       if (!parameters.continueOnError) {
@@ -14,5 +14,5 @@ export interface UploadOptions {
    * files with the exception of the problematic file(s)/chunk(s) that failed to upload
    *
    */
-  continueOnError?: boolean
+  continueOnError: boolean
 }