Mirror of https://github.com/actions/toolkit

Misc improvements

This commit is contained in:
Konrad Pabjan 2020-01-27 23:17:23 -05:00
parent 4431cd8b98
commit 7cf5770168
9 changed files with 358 additions and 118 deletions


@@ -28,9 +28,27 @@ describe('search', () => {
'file-under-c.txt'
)
await fs.writeFile(itemPath, 'sample file under folder c')
/*
Directory structure of files that were created:
root/
folder-a/
folder-b/
folder-c/
file-under-c.txt
*/
const exepectedUploadFilePath = path.join(artifactName, 'file-under-c.txt')
const searchResult = await findFilesToUpload(artifactName, itemPath)
/*
searchResult[] should be equal to:
[
{
absoluteFilePath: itemPath
uploadFilePath: my-artifact/file-under-c.txt
}
]
*/
expect(searchResult.length).toEqual(1)
expect(searchResult[0].uploadFilePath).toEqual(exepectedUploadFilePath)
expect(searchResult[0].absoluteFilePath).toEqual(itemPath)
@@ -55,9 +73,28 @@ describe('search', () => {
'item1.txt'
)
await fs.writeFile(itemPath, 'sample file under folder c')
/*
Directory structure of files that were created:
root/
folder-a/
folder-b/
folder-c/
item1.txt
*/
const searchPath = path.join(root, '**/*m1.txt')
const exepectedUploadFilePath = path.join(artifactName, 'item1.txt')
const searchResult = await findFilesToUpload(artifactName, searchPath)
/*
searchResult should be equal to:
[
{
absoluteFilePath: itemPath
uploadFilePath: my-artifact/item1.txt
}
]
*/
expect(searchResult.length).toEqual(1)
expect(searchResult[0].uploadFilePath).toEqual(exepectedUploadFilePath)
expect(searchResult[0].absoluteFilePath).toEqual(itemPath)
@@ -105,7 +142,33 @@ describe('search', () => {
item4.txt
item5.txt
*/
const searchResult = await findFilesToUpload(artifactName, root)
/*
searchResult should be equal to:
[
{
absoluteFilePath: item1Path
uploadFilePath: my-artifact/folder-a/folder-b/folder-c/item1.txt
},
{
absoluteFilePath: item2Path
uploadFilePath: my-artifact/folder-d/item2.txt
},
{
absoluteFilePath: item3Path
uploadFilePath: my-artifact/folder-d/item3.txt
},
{
absoluteFilePath: item4Path
uploadFilePath: my-artifact/folder-d/item4.txt
},
{
absoluteFilePath: item5Path
uploadFilePath: my-artifact/item5.txt
}
]
*/
expect(searchResult.length).toEqual(5)
const absolutePaths = searchResult.map(item => item.absoluteFilePath)
@@ -209,7 +272,34 @@ describe('search', () => {
folder-i/
item5.txt
*/
const searchResult = await findFilesToUpload(artifactName, root)
/*
searchResult should be equal to:
[
{
absoluteFilePath: item1Path
uploadFilePath: my-artifact/folder-a/folder-b/folder-c/item1.txt
},
{
absoluteFilePath: item2Path
uploadFilePath: my-artifact/folder-d/item2.txt
},
{
absoluteFilePath: item3Path
uploadFilePath: my-artifact/folder-d/item3.txt
},
{
absoluteFilePath: item4Path
uploadFilePath: my-artifact/folder-d/item4.txt
},
{
absoluteFilePath: item5Path
uploadFilePath: my-artifact/item5.txt
}
]
*/
expect(searchResult.length).toEqual(5)
const absolutePaths = searchResult.map(item => item.absoluteFilePath)
@@ -322,7 +412,6 @@ describe('search', () => {
await fs.writeFile(badItem3Path, 'bad item3 file')
await fs.writeFile(badItem4Path, 'bad item4 file')
await fs.writeFile(badItem5Path, 'bad item5 file')
/*
Directory structure of files that were created:
root/
@@ -346,8 +435,35 @@ describe('search', () => {
bad-item5.txt
good-item5.txt
*/
const searchPath = path.join(root, '**/good*')
const searchResult = await findFilesToUpload(artifactName, searchPath)
/*
searchResult should be equal to:
[
{
absoluteFilePath: goodItem1Path
uploadFilePath: my-artifact/folder-a/folder-b/folder-c/good-item1.txt
},
{
absoluteFilePath: goodItem2Path
uploadFilePath: my-artifact/folder-d/good-item2.txt
},
{
absoluteFilePath: goodItem3Path
uploadFilePath: my-artifact/folder-d/good-item3.txt
},
{
absoluteFilePath: goodItem4Path
uploadFilePath: my-artifact/folder-d/good-item4.txt
},
{
absoluteFilePath: goodItem5Path
uploadFilePath: my-artifact/good-item5.txt
}
]
*/
expect(searchResult.length).toEqual(5)
const absolutePaths = searchResult.map(item => item.absoluteFilePath)
@@ -393,6 +509,6 @@ describe('search', () => {
})
function getTestTemp(): string {
- return path.join(__dirname, '_temp', 'artifact')
+ return path.join(__dirname, '_temp', 'artifact-search')
}
})


@ -0,0 +1,75 @@
import * as fs from 'fs'
import * as io from '../../io/src/io'
import * as path from 'path'
import * as uploadHttpClient from '../src/upload-artifact-http-client'
/*
These tests will fail locally as they require some env variables to be set by the runner
*/
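/*
The env variables in question appear to be ACTIONS_RUNTIME_TOKEN, ACTIONS_RUNTIME_URL and
GITHUB_RUN_ID, which createHttpClient(), getArtifactUrl() and getWorkFlowRunId() in utils.ts
read from the environment (inferred from the utils.ts changes in this commit)
*/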
describe('upload-tests', () => {
/**
* Simple test to verify an artifact container can be created with the expected response
*/
it('Create artifact in file container API test', async () => {
const name = 'my-artifact-container'
const response = await uploadHttpClient.createArtifactInFileContainer(name)
expect(response.name).toEqual(name)
expect(response.size).toEqual(-1)
expect(response.type).toEqual('actions_storage')
const expectedResourceUrl = `${process.env['ACTIONS_RUNTIME_URL']}_apis/resources/Containers/${response.containerId}`
expect(response.fileContainerResourceUrl).toEqual(expectedResourceUrl)
})
/**
* Tests creating a new artifact container, uploading a small file and then associating the
* uploaded artifact with the correct size
*/
it('Upload simple file and associate artifact', async () => {
const name = 'my-artifact-with-files'
const response = await uploadHttpClient.createArtifactInFileContainer(name)
expect(response.name).toEqual(name)
expect(response.size).toEqual(-1)
expect(response.type).toEqual('actions_storage')
const expectedResourceUrl = `${process.env['ACTIONS_RUNTIME_URL']}_apis/resources/Containers/${response.containerId}`
expect(response.fileContainerResourceUrl).toEqual(expectedResourceUrl)
// clear temp directory and create a simple file that will be uploaded
await io.rmRF(getTestTemp())
await fs.promises.mkdir(getTestTemp(), {recursive: true})
const itemPath = path.join(getTestTemp(), 'testFile.txt')
await fs.promises.writeFile(
itemPath,
'Simple file that we will be uploading'
)
/**
* findFilesToUpload() from search.ts will normally return the information for what to upload. For these tests
* however, filesToUpload will be hardcoded to just test the upload APIs
*/
const filesToUpload = [
{
absoluteFilePath: itemPath,
uploadFilePath: path.join(name, 'testFile.txt')
}
]
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
response.fileContainerResourceUrl,
filesToUpload
)
expect(uploadResult.failedItems.length).toEqual(0)
expect(uploadResult.size).toEqual(fs.statSync(itemPath).size)
expect(async () => {
await uploadHttpClient.patchArtifactSize(uploadResult.size, name)
}).not.toThrow()
})
function getTestTemp(): string {
return path.join(__dirname, '_temp', 'artifact-upload')
}
})


@@ -6,6 +6,7 @@ import {
patchArtifactSize
} from './upload-artifact-http-client'
import {UploadInfo} from './upload-info'
import {UploadOptions} from './upload-options'
import {checkArtifactName} from './utils'
/**
@@ -13,11 +14,13 @@ import {checkArtifactName} from './utils'
*
* @param name the name of the artifact, required
* @param path the directory, file, or glob pattern to denote what will be uploaded, required
+ * @param options extra options for customizing the upload behavior
* @returns single UploadInfo object
*/
export async function uploadArtifact(
name: string,
- path: string
+ path: string,
+ options?: UploadOptions
): Promise<UploadInfo> {
checkArtifactName(name)
@@ -27,16 +30,21 @@ export async function uploadArtifact(
// Search for the items that will be uploaded
const filesToUpload: SearchResult[] = await findFilesToUpload(name, path)
- let reportedSize = -1
if (filesToUpload === undefined) {
- core.setFailed(
- `Unable to succesfully search fo files to upload with the provided path: ${path}`
+ throw new Error(
+ `Unable to succesfully search for files to upload with the provided path: ${path}`
)
} else if (filesToUpload.length === 0) {
core.warning(
`No files were found for the provided path: ${path}. No artifacts will be uploaded.`
)
+ return {
+ artifactName: name,
+ artifactItems: [],
+ size: 0,
+ failedItems: []
+ }
} else {
/**
* Step 1 of 3
@@ -55,30 +63,28 @@ export async function uploadArtifact(
* Step 2 of 3
* Upload each of the files that were found concurrently
*/
- const uploadingArtifact: Promise<number> = Promise.resolve(
- uploadArtifactToFileContainer(
+ const uploadResult = await uploadArtifactToFileContainer(
response.fileContainerResourceUrl,
- filesToUpload
+ filesToUpload,
+ options
)
- )
- uploadingArtifact.then(async size => {
// eslint-disable-next-line no-console
console.log(
- `All files for artifact ${name} have finished uploading. Reported upload size is ${size} bytes`
+ `Finished uploading artifact ${name}. Reported size is ${uploadResult.size} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
)
/**
* Step 3 of 3
* Update the size of the artifact to indicate we are done uploading
*/
- await patchArtifactSize(size, name)
+ await patchArtifactSize(uploadResult.size, name)
- reportedSize = size
- })
- }
return {
artifactName: name,
artifactItems: filesToUpload.map(item => item.absoluteFilePath),
- size: reportedSize
+ size: uploadResult.size,
+ failedItems: uploadResult.failedItems
+ }
}
}
@@ -88,7 +94,7 @@ Downloads a single artifact associated with a run
export async function downloadArtifact(
name: string,
path?: string,
- createArtifactFolder?:boolean
+ options?: DownloadOptions
): Promise<DownloadInfo> {
TODO
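The hunks above change the public upload entry point to accept an options object and to surface failed items on UploadInfo. A minimal caller sketch, assuming the package is consumed as @actions/artifact and that uploadArtifact is re-exported from the package entry point (both assumptions, not shown in this diff):

import * as artifact from '@actions/artifact'

async function run(): Promise<void> {
  // name and path as before; the options argument is now optional
  const uploadInfo = await artifact.uploadArtifact('my-artifact', '**/*.txt')
  // UploadInfo now reports failedItems alongside artifactItems and size
  console.log(
    `Uploaded ${uploadInfo.size} bytes; ${uploadInfo.failedItems.length} of ${uploadInfo.artifactItems.length} items failed`
  )
}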


@@ -26,3 +26,8 @@ export interface PatchArtifactSizeSuccessResponse {
url: string
uploadUrl: string
}
export interface UploadResults {
size: number
failedItems: string[]
}


@@ -0,0 +1,7 @@
export interface DownloadOptions {
/**
* Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder),
* defaults to false if not specified
* */
createArtifactFolder?: boolean
}
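For context, this option feeds the reworked downloadArtifact(name, path?, options?) signature shown earlier, whose body is still marked TODO in this commit; a hypothetical call might eventually look like this (the import path, names and paths are illustrative assumptions):

import * as artifact from '@actions/artifact'

async function restore(): Promise<void> {
  // Hypothetical usage: download into ./downloads/my-artifact rather than ./downloads directly
  await artifact.downloadArtifact('my-artifact', './downloads', {
    createArtifactFolder: true
  })
}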


@@ -1,27 +1,30 @@
import {debug} from '@actions/core'
- import {BearerCredentialHandler} from '@actions/http-client/auth'
import {HttpClientResponse, HttpClient} from '@actions/http-client/index'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {
CreateArtifactResponse,
CreateArtifactParameters,
PatchArtifactSize,
- PatchArtifactSizeSuccessResponse
+ PatchArtifactSizeSuccessResponse,
+ UploadResults
} from './contracts'
import * as fs from 'fs'
import {SearchResult} from './search'
+ import {UploadOptions} from './upload-options'
import {URL} from 'url'
import {
- parseEnvNumber,
+ createHttpClient,
getArtifactUrl,
+ getContentRange,
+ getRequestOptions,
isSuccessStatusCode,
isRetryableStatusCode,
- getRequestOptions,
- getContentRange
+ parseEnvNumber
} from './utils'
const defaultChunkUploadConcurrency = 3
const defaultFileUploadConcurrency = 2
+ const userAgent = 'actions/artifact'
/**
* Step 1 of 3 when uploading an artifact. Creates a file container for the new artifact in the remote blob storage/file service
@@ -31,19 +34,14 @@ const defaultFileUploadConcurrency = 2
export async function createArtifactInFileContainer(
artifactName: string
): Promise<CreateArtifactResponse> {
- const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
- const bearerCredentialHandler = new BearerCredentialHandler(token)
- const requestOptions = getRequestOptions()
- requestOptions['Content-Type'] = 'application/json'
- const client: HttpClient = new HttpClient('actions/artifact', [
- bearerCredentialHandler
- ])
+ const client = createHttpClient(userAgent)
const parameters: CreateArtifactParameters = {
Type: 'actions_storage',
Name: artifactName
}
const data: string = JSON.stringify(parameters, null, 2)
+ const requestOptions = getRequestOptions()
+ requestOptions['Content-Type'] = 'application/json'
const rawResponse: HttpClientResponse = await client.post(
getArtifactUrl(),
data,
@@ -72,13 +70,10 @@ export async function createArtifactInFileContainer(
*/
export async function uploadArtifactToFileContainer(
uploadUrl: string,
- filesToUpload: SearchResult[]
- ): Promise<number> {
- const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
- const bearerCredentialHandler = new BearerCredentialHandler(token)
- const client: HttpClient = new HttpClient('actions/artifact', [
- bearerCredentialHandler
- ])
+ filesToUpload: SearchResult[],
+ options?: UploadOptions
+ ): Promise<UploadResults> {
+ const client = createHttpClient(userAgent)
const FILE_CONCURRENCY =
parseEnvNumber('ARTIFACT_FILE_UPLOAD_CONCURRENCY') ||
@@ -111,6 +106,9 @@ export async function uploadArtifactToFileContainer(
})
}
// eslint-disable-next-line no-console
console.log(options) // TODO remove, temp
const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]
const fileSizes: number[] = []
let uploadedFiles = 0
@@ -131,7 +129,10 @@ export async function uploadArtifactToFileContainer(
const sum = fileSizes.reduce((acc, val) => acc + val)
// eslint-disable-next-line no-console
console.log(`Total size of all the files uploaded ${sum}`)
- return sum
+ return {
+ size: sum,
+ failedItems: []
+ }
}
/**
@@ -259,6 +260,7 @@ export async function patchArtifactSize(
size: number,
artifactName: string
): Promise<void> {
const client = createHttpClient(userAgent)
const requestOptions = getRequestOptions()
requestOptions['Content-Type'] = 'application/json'
@@ -268,11 +270,6 @@ export async function patchArtifactSize(
const parameters: PatchArtifactSize = {Size: size}
const data: string = JSON.stringify(parameters, null, 2)
- const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
- const bearerCredentialHandler = new BearerCredentialHandler(token)
- const client: HttpClient = new HttpClient('actions/artifact', [
- bearerCredentialHandler
- ])
// eslint-disable-next-line no-console
console.log(`URL is ${resourceUrl.toString()}`)


@@ -5,7 +5,7 @@ export interface UploadInfo {
artifactName: string
/**
- * A list of items that were uploaded as part of the artifact
+ * A list of all items found using the provided path that are intended to be uploaded as part of the artifact
*/
artifactItems: string[]
@@ -13,4 +13,10 @@ export interface UploadInfo {
* Total size of the artifact in bytes that was uploaded
*/
size: number
/**
* A list of items that were not uploaded as part of the artifact (includes queued items that were not uploaded if
* continueOnError is set to false). This is a subset of artifactItems.
*/
failedItems: string[]
}


@@ -0,0 +1,18 @@
export interface UploadOptions {
/**
* Indicates if the artifact upload should continue if a file or chunk fails to upload from any error.
* If there is an error during upload, a partial artifact will always be associated and available for
* download at the end. The size reported will be the amount of storage that the user or org will be
* charged for the partial artifact. Defaults to true if not specified
*
* If set to false, and an error is encountered, all other uploads will stop and any files or chunks
* that were queued will not be attempted to be uploaded. The partial artifact available will only
* include files and chunks up until the failure
*
* If set to true and an error is encountered, the failed file will be skipped and ignored and all
* other queued files will be attempted to be uploaded. The partial artifact at the end will have all
* files with the exception of the problematic file(s)/chunk(s) that failed to upload
*
*/
continueOnError?: boolean
}
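A short sketch of how continueOnError is intended to surface in results, given the failedItems field added to UploadInfo in this commit (the @actions/artifact import path and the glob are assumptions):

import * as core from '@actions/core'
import * as artifact from '@actions/artifact'

async function uploadStrict(): Promise<void> {
  // With continueOnError: false, a failed file or chunk should stop the remaining queued uploads
  const result = await artifact.uploadArtifact('test-results', '**/*.xml', {
    continueOnError: false
  })
  if (result.failedItems.length > 0) {
    core.setFailed(`${result.failedItems.length} items failed to upload`)
  }
}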


@@ -1,5 +1,6 @@
import {debug} from '@actions/core'
- import {HttpCodes} from '@actions/http-client'
+ import {HttpCodes, HttpClient} from '@actions/http-client'
+ import {BearerCredentialHandler} from '@actions/http-client/auth'
import {IHeaders} from '@actions/http-client/interfaces'
const apiVersion = '6.0-preview'
@@ -43,16 +44,6 @@ export function getContentRange(
return `bytes ${start}-${end}/${total}`
}
- export function getArtifactUrl(): string {
- const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']
- if (!runtimeUrl) {
- throw new Error('Runtime url not found, unable to create artifact.')
- }
- const artifactUrl = `${runtimeUrl}_apis/pipelines/workflows/${getWorkFlowRunId()}/artifacts?api-version=${apiVersion}`
- debug(`Artifact Url: ${artifactUrl}`)
- return artifactUrl
- }
export function getRequestOptions(): IHeaders {
const requestOptions: IHeaders = {
Accept: createAcceptHeader('application/json')
@@ -64,6 +55,25 @@ export function createAcceptHeader(type: string): string {
return `${type};api-version=${apiVersion}`
}
export function createHttpClient(userAgent: string): HttpClient {
const token = process.env['ACTIONS_RUNTIME_TOKEN']
if (!token) {
throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN')
}
return new HttpClient(userAgent, [new BearerCredentialHandler(token)])
}
export function getArtifactUrl(): string {
const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']
if (!runtimeUrl) {
throw new Error('Runtime url not found, unable to create artifact.')
}
const artifactUrl = `${runtimeUrl}_apis/pipelines/workflows/${getWorkFlowRunId()}/artifacts?api-version=${apiVersion}`
debug(`Artifact Url: ${artifactUrl}`)
return artifactUrl
}
function getWorkFlowRunId(): string {
const workFlowrunId = process.env['GITHUB_RUN_ID'] || ''
if (!workFlowrunId) {