Mirror of https://github.com/actions/toolkit (synced 2025-05-10 17:12:54 +00:00)

Misc improvements

parent 4431cd8b98
commit 7cf5770168

9 changed files with 358 additions and 118 deletions
@@ -28,9 +28,27 @@ describe('search', () => {
      'file-under-c.txt'
    )
    await fs.writeFile(itemPath, 'sample file under folder c')
    /*
      Directory structure of files that were created:
      root/
        folder-a/
          folder-b/
            folder-c/
              file-under-c.txt
    */

    const exepectedUploadFilePath = path.join(artifactName, 'file-under-c.txt')
    const searchResult = await findFilesToUpload(artifactName, itemPath)
    /*
      searchResult[] should be equal to:
      [
        {
          absoluteFilePath: itemPath
          uploadFilePath: my-artifact/file-under-c.txt
        }
      ]
    */

    expect(searchResult.length).toEqual(1)
    expect(searchResult[0].uploadFilePath).toEqual(exepectedUploadFilePath)
    expect(searchResult[0].absoluteFilePath).toEqual(itemPath)
@@ -55,9 +73,28 @@ describe('search', () => {
      'item1.txt'
    )
    await fs.writeFile(itemPath, 'sample file under folder c')
    /*
      Directory structure of files that were created:
      root/
        folder-a/
          folder-b/
            folder-c/
              item1.txt
    */

    const searchPath = path.join(root, '**/*m1.txt')
    const exepectedUploadFilePath = path.join(artifactName, 'item1.txt')
    const searchResult = await findFilesToUpload(artifactName, searchPath)
    /*
      searchResult should be equal to:
      [
        {
          absoluteFilePath: itemPath
          uploadFilePath: my-artifact/item1.txt
        }
      ]
    */

    expect(searchResult.length).toEqual(1)
    expect(searchResult[0].uploadFilePath).toEqual(exepectedUploadFilePath)
    expect(searchResult[0].absoluteFilePath).toEqual(itemPath)
@@ -105,7 +142,33 @@ describe('search', () => {
            item4.txt
            item5.txt
    */

    const searchResult = await findFilesToUpload(artifactName, root)
    /*
      searchResult should be equal to:
      [
        {
          absoluteFilePath: item1Path
          uploadFilePath: my-artifact/folder-a/folder-b/folder-c/item1.txt
        },
        {
          absoluteFilePath: item2Path
          uploadFilePath: my-artifact/folder-d/item2.txt
        },
        {
          absoluteFilePath: item3Path
          uploadFilePath: my-artifact/folder-d/item3.txt
        },
        {
          absoluteFilePath: item4Path
          uploadFilePath: my-artifact/folder-d/item4.txt
        },
        {
          absoluteFilePath: item5Path
          uploadFilePath: my-artifact/item5.txt
        }
      ]
    */
    expect(searchResult.length).toEqual(5)

    const absolutePaths = searchResult.map(item => item.absoluteFilePath)
@@ -209,7 +272,34 @@ describe('search', () => {
          folder-i/
            item5.txt
    */

    const searchResult = await findFilesToUpload(artifactName, root)
    /*
      searchResult should be equal to:
      [
        {
          absoluteFilePath: item1Path
          uploadFilePath: my-artifact/folder-a/folder-b/folder-c/item1.txt
        },
        {
          absoluteFilePath: item2Path
          uploadFilePath: my-artifact/folder-d/item2.txt
        },
        {
          absoluteFilePath: item3Path
          uploadFilePath: my-artifact/folder-d/item3.txt
        },
        {
          absoluteFilePath: item4Path
          uploadFilePath: my-artifact/folder-d/item4.txt
        },
        {
          absoluteFilePath: item5Path
          uploadFilePath: my-artifact/item5.txt
        }
      ]
    */

    expect(searchResult.length).toEqual(5)

    const absolutePaths = searchResult.map(item => item.absoluteFilePath)
@@ -322,7 +412,6 @@ describe('search', () => {
    await fs.writeFile(badItem3Path, 'bad item3 file')
    await fs.writeFile(badItem4Path, 'bad item4 file')
    await fs.writeFile(badItem5Path, 'bad item5 file')

    /*
      Directory structure of files that were created:
      root/
@@ -346,8 +435,35 @@ describe('search', () => {
            bad-item5.txt
            good-item5.txt
    */

    const searchPath = path.join(root, '**/good*')
    const searchResult = await findFilesToUpload(artifactName, searchPath)
    /*
      searchResult should be equal to:
      [
        {
          absoluteFilePath: goodItem1Path
          uploadFilePath: my-artifact/folder-a/folder-b/folder-c/good-item1.txt
        },
        {
          absoluteFilePath: goodItem2Path
          uploadFilePath: my-artifact/folder-d/good-item2.txt
        },
        {
          absoluteFilePath: goodItem3Path
          uploadFilePath: my-artifact/folder-d/good-item3.txt
        },
        {
          absoluteFilePath: goodItem4Path
          uploadFilePath: my-artifact/folder-d/good-item4.txt
        },
        {
          absoluteFilePath: goodItem5Path
          uploadFilePath: my-artifact/good-item5.txt
        }
      ]
    */

    expect(searchResult.length).toEqual(5)

    const absolutePaths = searchResult.map(item => item.absoluteFilePath)
@@ -393,6 +509,6 @@ describe('search', () => {
  })

  function getTestTemp(): string {
    return path.join(__dirname, '_temp', 'artifact')
    return path.join(__dirname, '_temp', 'artifact-search')
  }
})
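The assertions above describe the contract these search tests rely on: each result pairs an absolute path on disk with an upload path formed by joining the artifact name with the file's path relative to the search root. findFilesToUpload itself is not part of this diff, so the following is only a minimal sketch of the mapping the tests assume; the helper name and the searchRoot parameter are hypothetical.

    import * as path from 'path'

    // Shape asserted by the tests above (declared in './search' per the imports later in this diff).
    interface SearchResult {
      absoluteFilePath: string
      uploadFilePath: string
    }

    // Hypothetical helper: the upload path is the artifact name joined with the
    // file's location relative to the directory the search was rooted at,
    // e.g. my-artifact/folder-d/item2.txt.
    function toSearchResult(
      artifactName: string,
      searchRoot: string,
      absoluteFilePath: string
    ): SearchResult {
      return {
        absoluteFilePath,
        uploadFilePath: path.join(
          artifactName,
          path.relative(searchRoot, absoluteFilePath)
        )
      }
    }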
75  packages/artifact/__tests__/upload.test.ts  Normal file
@@ -0,0 +1,75 @@
import * as fs from 'fs'
import * as io from '../../io/src/io'
import * as path from 'path'
import * as uploadHttpClient from '../src/upload-artifact-http-client'

/*
  These tests will fail locally, as they require some env variables to be set by the runner
*/
describe('upload-tests', () => {
  /**
   * Simple test to verify an artifact container can be created with the expected response
   */
  it('Create artifact in file container API test', async () => {
    const name = 'my-artifact-container'
    const response = await uploadHttpClient.createArtifactInFileContainer(name)

    expect(response.name).toEqual(name)
    expect(response.size).toEqual(-1)
    expect(response.type).toEqual('actions_storage')

    const expectedResourceUrl = `${process.env['ACTIONS_RUNTIME_URL']}_apis/resources/Containers/${response.containerId}`
    expect(response.fileContainerResourceUrl).toEqual(expectedResourceUrl)
  })

  /**
   * Tests creating a new artifact container, uploading a small file and then associating the
   * uploaded artifact with the correct size
   */
  it('Upload simple file and associate artifact', async () => {
    const name = 'my-artifact-with-files'
    const response = await uploadHttpClient.createArtifactInFileContainer(name)

    expect(response.name).toEqual(name)
    expect(response.size).toEqual(-1)
    expect(response.type).toEqual('actions_storage')

    const expectedResourceUrl = `${process.env['ACTIONS_RUNTIME_URL']}_apis/resources/Containers/${response.containerId}`
    expect(response.fileContainerResourceUrl).toEqual(expectedResourceUrl)

    // clear temp directory and create a simple file that will be uploaded
    await io.rmRF(getTestTemp())
    await fs.promises.mkdir(getTestTemp(), {recursive: true})
    const itemPath = path.join(getTestTemp(), 'testFile.txt')
    await fs.promises.writeFile(
      itemPath,
      'Simple file that we will be uploading'
    )

    /**
     * findFilesToUpload() from search.ts will normally return the information for what to upload. For these tests
     * however, filesToUpload will be hardcoded to just test the upload APIs
     */
    const filesToUpload = [
      {
        absoluteFilePath: itemPath,
        uploadFilePath: path.join(name, 'testFile.txt')
      }
    ]

    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      response.fileContainerResourceUrl,
      filesToUpload
    )
    expect(uploadResult.failedItems.length).toEqual(0)
    expect(uploadResult.size).toEqual(fs.statSync(itemPath).size)

    expect(async () => {
      await uploadHttpClient.patchArtifactSize(uploadResult.size, name)
    }).not.toThrow()
  })

  function getTestTemp(): string {
    return path.join(__dirname, '_temp', 'artifact-upload')
  }
})
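These upload tests depend on environment variables that a hosted runner normally injects. Based only on the variables referenced elsewhere in this diff (ACTIONS_RUNTIME_TOKEN, ACTIONS_RUNTIME_URL, GITHUB_RUN_ID), a small pre-flight guard for running them outside a runner might look like the sketch below; the guard is not part of this commit and the helper name is hypothetical.

    // Hypothetical helper, not part of this commit: fail fast when the
    // runner-provided environment is missing so the HTTP calls do not 401/404.
    const requiredEnv = ['ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL', 'GITHUB_RUN_ID']

    function assertRunnerEnvironment(): void {
      const missing = requiredEnv.filter(name => !process.env[name])
      if (missing.length > 0) {
        throw new Error(`Missing runner-provided env variables: ${missing.join(', ')}`)
      }
    }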
@@ -6,6 +6,7 @@ import {
  patchArtifactSize
} from './upload-artifact-http-client'
import {UploadInfo} from './upload-info'
import {UploadOptions} from './upload-options'
import {checkArtifactName} from './utils'

/**
@@ -13,11 +14,13 @@ import {checkArtifactName} from './utils'
 *
 * @param name the name of the artifact, required
 * @param path the directory, file, or glob pattern to denote what will be uploaded, required
 * @param options extra options for customizing the upload behavior
 * @returns single UploadInfo object
 */
export async function uploadArtifact(
  name: string,
  path: string
  path: string,
  options?: UploadOptions
): Promise<UploadInfo> {
  checkArtifactName(name)

@@ -27,16 +30,21 @@ export async function uploadArtifact(

  // Search for the items that will be uploaded
  const filesToUpload: SearchResult[] = await findFilesToUpload(name, path)
  let reportedSize = -1

  if (filesToUpload === undefined) {
    core.setFailed(
      `Unable to succesfully search fo files to upload with the provided path: ${path}`
    throw new Error(
      `Unable to succesfully search for files to upload with the provided path: ${path}`
    )
  } else if (filesToUpload.length === 0) {
    core.warning(
      `No files were found for the provided path: ${path}. No artifacts will be uploaded.`
    )
    return {
      artifactName: name,
      artifactItems: [],
      size: 0,
      failedItems: []
    }
  } else {
    /**
     * Step 1 of 3
@@ -55,30 +63,28 @@
     * Step 2 of 3
     * Upload each of the files that were found concurrently
     */
    const uploadingArtifact: Promise<number> = Promise.resolve(
      uploadArtifactToFileContainer(
    const uploadResult = await uploadArtifactToFileContainer(
      response.fileContainerResourceUrl,
      filesToUpload
      filesToUpload,
      options
    )
    )
    uploadingArtifact.then(async size => {
    // eslint-disable-next-line no-console
    console.log(
      `All files for artifact ${name} have finished uploading. Reported upload size is ${size} bytes`
      `Finished uploading artifact ${name}. Reported size is ${uploadResult.size} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
    )

    /**
     * Step 3 of 3
     * Update the size of the artifact to indicate we are done uploading
     */
    await patchArtifactSize(size, name)
    reportedSize = size
    })
    }
    await patchArtifactSize(uploadResult.size, name)

    return {
      artifactName: name,
      artifactItems: filesToUpload.map(item => item.absoluteFilePath),
      size: reportedSize
      size: uploadResult.size,
      failedItems: uploadResult.failedItems
    }
  }
}

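With the options parameter and the new failedItems field in place, a caller can inspect partial-failure information directly from the returned UploadInfo. The sketch below only illustrates the call shape shown in this diff; the import path, artifact name, and glob pattern are assumptions, not part of the commit.

    // Sketch of a caller using the updated uploadArtifact signature.
    import {uploadArtifact} from './upload-artifact' // hypothetical module path

    async function run(): Promise<void> {
      const info = await uploadArtifact('my-artifact', '**/good*', {
        continueOnError: false
      })
      console.log(
        `Uploaded ${info.artifactItems.length} item(s), ${info.size} bytes, ` +
          `${info.failedItems.length} failed`
      )
    }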
@@ -88,7 +94,7 @@ Downloads a single artifact associated with a run
export async function downloadArtifact(
  name: string,
  path?: string,
  createArtifactFolder?: boolean
  options?: DownloadOptions
): Promise<DownloadInfo> {

  TODO
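The download side is still a TODO in this commit; only the signature changes here, moving the createArtifactFolder flag into a DownloadOptions object. A sketch of the intended call shape, once the body is implemented, might look like the following; the import path and the destination path are assumptions, and DownloadInfo's fields are not shown in this diff.

    import {downloadArtifact} from './download-artifact' // hypothetical module path

    async function restore(): Promise<void> {
      // createArtifactFolder comes from the new DownloadOptions interface added below.
      const info = await downloadArtifact('my-artifact', './artifact-output', {
        createArtifactFolder: true
      })
      console.log(info)
    }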
@@ -26,3 +26,8 @@ export interface PatchArtifactSizeSuccessResponse {
  url: string
  uploadUrl: string
}

export interface UploadResults {
  size: number
  failedItems: string[]
}
7  packages/artifact/src/download-options.ts  Normal file
@@ -0,0 +1,7 @@
export interface DownloadOptions {
  /**
   * Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder),
   * defaults to false if not specified
   */
  createArtifactFolder?: boolean
}
@@ -1,27 +1,30 @@
import {debug} from '@actions/core'
import {BearerCredentialHandler} from '@actions/http-client/auth'
import {HttpClientResponse, HttpClient} from '@actions/http-client/index'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {
  CreateArtifactResponse,
  CreateArtifactParameters,
  PatchArtifactSize,
  PatchArtifactSizeSuccessResponse
  PatchArtifactSizeSuccessResponse,
  UploadResults
} from './contracts'
import * as fs from 'fs'
import {SearchResult} from './search'
import {UploadOptions} from './upload-options'
import {URL} from 'url'
import {
  parseEnvNumber,
  createHttpClient,
  getArtifactUrl,
  getContentRange,
  getRequestOptions,
  isSuccessStatusCode,
  isRetryableStatusCode,
  getRequestOptions,
  getContentRange
  parseEnvNumber
} from './utils'

const defaultChunkUploadConcurrency = 3
const defaultFileUploadConcurrency = 2
const userAgent = 'actions/artifact'

/**
 * Step 1 of 3 when uploading an artifact. Creates a file container for the new artifact in the remote blob storage/file service
@@ -31,19 +34,14 @@ const defaultFileUploadConcurrency = 2
export async function createArtifactInFileContainer(
  artifactName: string
): Promise<CreateArtifactResponse> {
  const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
  const bearerCredentialHandler = new BearerCredentialHandler(token)
  const requestOptions = getRequestOptions()
  requestOptions['Content-Type'] = 'application/json'

  const client: HttpClient = new HttpClient('actions/artifact', [
    bearerCredentialHandler
  ])
  const client = createHttpClient(userAgent)
  const parameters: CreateArtifactParameters = {
    Type: 'actions_storage',
    Name: artifactName
  }
  const data: string = JSON.stringify(parameters, null, 2)
  const requestOptions = getRequestOptions()
  requestOptions['Content-Type'] = 'application/json'
  const rawResponse: HttpClientResponse = await client.post(
    getArtifactUrl(),
    data,
@@ -72,13 +70,10 @@ export async function createArtifactInFileContainer(
 */
export async function uploadArtifactToFileContainer(
  uploadUrl: string,
  filesToUpload: SearchResult[]
): Promise<number> {
  const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
  const bearerCredentialHandler = new BearerCredentialHandler(token)
  const client: HttpClient = new HttpClient('actions/artifact', [
    bearerCredentialHandler
  ])
  filesToUpload: SearchResult[],
  options?: UploadOptions
): Promise<UploadResults> {
  const client = createHttpClient(userAgent)

  const FILE_CONCURRENCY =
    parseEnvNumber('ARTIFACT_FILE_UPLOAD_CONCURRENCY') ||
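The file (and chunk) concurrency defaults can evidently be overridden through environment variables via parseEnvNumber, whose body lives in ./utils and is not part of this diff. The following is only a sketch, under that assumption, of how such an override might resolve; the helper name here is deliberately different to mark it as hypothetical.

    // Hypothetical sketch; the real parseEnvNumber in ./utils is not shown in this diff.
    function parseEnvNumberSketch(name: string): number | undefined {
      const value = Number(process.env[name])
      if (Number.isNaN(value) || value <= 0) {
        return undefined
      }
      return value
    }

    const defaultFileUploadConcurrency = 2
    // Falls back to the default when the variable is unset or not a positive number.
    const FILE_CONCURRENCY =
      parseEnvNumberSketch('ARTIFACT_FILE_UPLOAD_CONCURRENCY') ||
      defaultFileUploadConcurrency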
@@ -111,6 +106,9 @@
    })
  }

  // eslint-disable-next-line no-console
  console.log(options) // TODO remove, temp

  const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]
  const fileSizes: number[] = []
  let uploadedFiles = 0
@@ -131,7 +129,10 @@
  const sum = fileSizes.reduce((acc, val) => acc + val)
  // eslint-disable-next-line no-console
  console.log(`Total size of all the files uploaded ${sum}`)
  return sum
  return {
    size: sum,
    failedItems: []
  }
}

/**
@@ -259,6 +260,7 @@ export async function patchArtifactSize(
  size: number,
  artifactName: string
): Promise<void> {
  const client = createHttpClient(userAgent)
  const requestOptions = getRequestOptions()
  requestOptions['Content-Type'] = 'application/json'

@@ -268,11 +270,6 @@
  const parameters: PatchArtifactSize = {Size: size}
  const data: string = JSON.stringify(parameters, null, 2)

  const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''
  const bearerCredentialHandler = new BearerCredentialHandler(token)
  const client: HttpClient = new HttpClient('actions/artifact', [
    bearerCredentialHandler
  ])
  // eslint-disable-next-line no-console
  console.log(`URL is ${resourceUrl.toString()}`)

@@ -5,7 +5,7 @@ export interface UploadInfo {
  artifactName: string

  /**
   * A list of items that were uploaded as part of the artifact
   * A list of all items found using the provided path that are intended to be uploaded as part of the artifact
   */
  artifactItems: string[]

@@ -13,4 +13,10 @@
   * Total size of the artifact in bytes that was uploaded
   */
  size: number

  /**
   * A list of items that were not uploaded as part of the artifact (includes queued items that were not uploaded if
   * continueOnError is set to false). This is a subset of artifactItems.
   */
  failedItems: string[]
}
18  packages/artifact/src/upload-options.ts  Normal file
@@ -0,0 +1,18 @@
export interface UploadOptions {
  /**
   * Indicates if the artifact upload should continue if a file or chunk fails to upload for any reason.
   * If there is an error during upload, a partial artifact will always be associated and available for
   * download at the end. The size reported will be the amount of storage that the user or org will be
   * charged for the partial artifact. Defaults to true if not specified.
   *
   * If set to false and an error is encountered, all other uploads will stop and any files or chunks
   * that were queued will not be attempted to be uploaded. The partial artifact available will only
   * include files and chunks up until the failure.
   *
   * If set to true and an error is encountered, the failed file will be skipped and ignored and all
   * other queued files will be attempted to be uploaded. The partial artifact at the end will have all
   * files with the exception of the problematic file(s)/chunk(s) that failed to upload.
   *
   */
  continueOnError?: boolean
}
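At this point the http client only logs the options object (the console.log(options) marked TODO above), so nothing in this commit consumes continueOnError yet. Purely to illustrate the two behaviors the doc comment describes, a sketch of how a per-file loop might apply the flag is shown below; the function and its parameters are hypothetical.

    import {UploadOptions} from './upload-options'

    // Hypothetical sketch, not part of this commit: skip-and-continue vs. abort.
    async function uploadWithPolicy(
      files: string[],
      uploadOne: (file: string) => Promise<void>,
      options?: UploadOptions
    ): Promise<string[]> {
      const continueOnError = options?.continueOnError ?? true // documented default
      const failedItems: string[] = []
      for (const file of files) {
        try {
          await uploadOne(file)
        } catch {
          failedItems.push(file)
          if (!continueOnError) {
            break // abort: queued files are not attempted
          }
          // continueOnError: skip this file and keep uploading the rest
        }
      }
      return failedItems
    }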
@@ -1,5 +1,6 @@
import {debug} from '@actions/core'
import {HttpCodes} from '@actions/http-client'
import {HttpCodes, HttpClient} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/auth'
import {IHeaders} from '@actions/http-client/interfaces'

const apiVersion = '6.0-preview'
@@ -43,16 +44,6 @@ export function getContentRange(
  return `bytes ${start}-${end}/${total}`
}

export function getArtifactUrl(): string {
  const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']
  if (!runtimeUrl) {
    throw new Error('Runtime url not found, unable to create artifact.')
  }
  const artifactUrl = `${runtimeUrl}_apis/pipelines/workflows/${getWorkFlowRunId()}/artifacts?api-version=${apiVersion}`
  debug(`Artifact Url: ${artifactUrl}`)
  return artifactUrl
}

export function getRequestOptions(): IHeaders {
  const requestOptions: IHeaders = {
    Accept: createAcceptHeader('application/json')
@@ -64,6 +55,25 @@ export function createAcceptHeader(type: string): string {
  return `${type};api-version=${apiVersion}`
}

export function createHttpClient(userAgent: string): HttpClient {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN')
  }

  return new HttpClient(userAgent, [new BearerCredentialHandler(token)])
}

export function getArtifactUrl(): string {
  const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']
  if (!runtimeUrl) {
    throw new Error('Runtime url not found, unable to create artifact.')
  }
  const artifactUrl = `${runtimeUrl}_apis/pipelines/workflows/${getWorkFlowRunId()}/artifacts?api-version=${apiVersion}`
  debug(`Artifact Url: ${artifactUrl}`)
  return artifactUrl
}

function getWorkFlowRunId(): string {
  const workFlowrunId = process.env['GITHUB_RUN_ID'] || ''
  if (!workFlowrunId) {
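The new createHttpClient helper centralizes the bearer-token setup that was previously repeated in each http-client function, and it composes with getRequestOptions and getArtifactUrl as shown in the hunks above. The sketch below only shows that wiring; the GET against the artifact URL and the function name are illustrative assumptions, not an endpoint this commit exercises.

    import {createHttpClient, getArtifactUrl, getRequestOptions} from './utils'

    // Hypothetical usage sketch of the helpers in this hunk.
    async function fetchArtifactEndpoint(): Promise<string> {
      const client = createHttpClient('actions/artifact') // bearer auth from ACTIONS_RUNTIME_TOKEN
      const requestOptions = getRequestOptions() // Accept: application/json;api-version=6.0-preview
      const response = await client.get(getArtifactUrl(), requestOptions)
      return response.readBody()
    }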