mirror of https://github.com/actions/toolkit

wip

parent c94ca49c9c
commit c11a7cdeac
@@ -0,0 +1,205 @@
+import * as github from '@actions/github'
+import type {RequestInterface} from '@octokit/types'
+import {
+  getArtifactInternal,
+  getArtifactPublic
+} from '../src/internal/find/get-artifact'
+import * as config from '../src/internal/shared/config'
+import {ArtifactServiceClientJSON} from '../src/generated'
+import * as util from '../src/internal/shared/util'
+import {noopLogs} from './common'
+
+type MockedRequest = jest.MockedFunction<RequestInterface<object>>
+
+jest.mock('@actions/github', () => ({
+  getOctokit: jest.fn().mockReturnValue({
+    request: jest.fn()
+  })
+}))
+
+const fixtures = {
+  repo: 'toolkit',
+  owner: 'actions',
+  token: 'ghp_1234567890',
+  runId: 123,
+  backendIds: {
+    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
+    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
+  },
+  artifacts: [
+    {
+      id: 1,
+      name: 'my-artifact',
+      size: 456,
+      createdAt: new Date('2023-12-01')
+    },
+    {
+      id: 2,
+      name: 'my-artifact',
+      size: 456,
+      createdAt: new Date('2023-12-02')
+    }
+  ]
+}
+
+describe('get-artifact', () => {
+  beforeAll(() => {
+    noopLogs()
+  })
+
+  describe('public', () => {
+    it('should return the artifact if it is found', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: [
+            {
+              name: fixtures.artifacts[0].name,
+              id: fixtures.artifacts[0].id,
+              size_in_bytes: fixtures.artifacts[0].size,
+              created_at: fixtures.artifacts[0].createdAt.toISOString()
+            }
+          ]
+        }
+      })
+
+      const response = await getArtifactPublic(
+        fixtures.artifacts[0].name,
+        fixtures.runId,
+        fixtures.owner,
+        fixtures.repo,
+        fixtures.token
+      )
+
+      expect(response).toEqual({
+        success: true,
+        artifact: fixtures.artifacts[0]
+      })
+    })
+
+    it('should return the latest artifact if multiple are found', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: fixtures.artifacts.map(artifact => ({
+            name: artifact.name,
+            id: artifact.id,
+            size_in_bytes: artifact.size,
+            created_at: artifact.createdAt.toISOString()
+          }))
+        }
+      })
+
+      const response = await getArtifactPublic(
+        fixtures.artifacts[0].name,
+        fixtures.runId,
+        fixtures.owner,
+        fixtures.repo,
+        fixtures.token
+      )
+
+      expect(response).toEqual({
+        success: true,
+        artifact: fixtures.artifacts[1]
+      })
+    })
+
+    it('should fail if no artifacts are found', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: []
+        }
+      })
+
+      const response = await getArtifactPublic(
+        fixtures.artifacts[0].name,
+        fixtures.runId,
+        fixtures.owner,
+        fixtures.repo,
+        fixtures.token
+      )
+
+      expect(response).toEqual({
+        success: false
+      })
+    })
+
+    it('should fail if non-200 response', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 404,
+        headers: {},
+        url: '',
+        data: {}
+      })
+
+      const response = await getArtifactPublic(
+        fixtures.artifacts[0].name,
+        fixtures.runId,
+        fixtures.owner,
+        fixtures.repo,
+        fixtures.token
+      )
+
+      expect(response).toEqual({
+        success: false
+      })
+    })
+  })
+
+  describe('internal', () => {
+    beforeEach(() => {
+      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
+
+      jest
+        .spyOn(util, 'getBackendIdsFromToken')
+        .mockReturnValue(fixtures.backendIds)
+
+      jest
+        .spyOn(config, 'getResultsServiceUrl')
+        .mockReturnValue('https://results.local')
+    })
+
+    it('should return the artifact if it is found', async () => {
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
+        .mockResolvedValue({
+          artifacts: [
+            {
+              ...fixtures.backendIds,
+              databaseId: fixtures.artifacts[0].id.toString(),
+              name: fixtures.artifacts[0].name,
+              size: fixtures.artifacts[0].size.toString()
+            }
+          ]
+        })
+
+      const response = await getArtifactInternal(fixtures.artifacts[0].name)
+
+      expect(response).toEqual({
+        success: true,
+        artifact: fixtures.artifacts[0]
+      })
+    })
+
+    it('should return the latest artifact if multiple are found', async () => {})
+
+    it('should fail if no artifacts are found', async () => {})
+
+    it('should fail if non-200 response', async () => {})
+  })
+})
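The three remaining cases in the describe('internal') block are left as empty stubs in this wip commit. Below is a minimal sketch of how the "latest artifact" stub might be filled in, meant to drop into that block and reusing the ListArtifacts mock shape from the filled-in test above; the expected result mirrors the public variant and is an assumption, not part of this commit.

it('should return the latest artifact if multiple are found', async () => {
  // Mock both fixture artifacts; the expectation assumes getArtifactInternal
  // resolves to the entry with the highest databaseId (fixtures.artifacts[1]).
  jest
    .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
    .mockResolvedValue({
      artifacts: fixtures.artifacts.map(artifact => ({
        ...fixtures.backendIds,
        databaseId: artifact.id.toString(),
        name: artifact.name,
        size: artifact.size.toString()
      }))
    })

  const response = await getArtifactInternal(fixtures.artifacts[0].name)

  expect(response).toEqual({
    success: true,
    artifact: fixtures.artifacts[1]
  })
})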
@@ -1,8 +1,8 @@
 import {warning} from '@actions/core'
 import {isGhes} from './shared/config'
 import {
-  UploadOptions,
-  UploadResponse,
+  UploadArtifactOptions,
+  UploadArtifactResponse,
   DownloadArtifactOptions,
   GetArtifactResponse,
   ListArtifactsOptions,
@@ -26,14 +26,14 @@ export interface ArtifactClient {
    * @param files A list of absolute or relative paths that denote what files should be uploaded
    * @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
    * @param options Extra options for customizing the upload behavior
-   * @returns single UploadResponse object
+   * @returns single UploadArtifactResponse object
    */
   uploadArtifact(
     name: string,
     files: string[],
     rootDirectory: string,
-    options?: UploadOptions
-  ): Promise<UploadResponse>
+    options?: UploadArtifactOptions
+  ): Promise<UploadArtifactResponse>
 
   /**
    * Lists all artifacts that are part of the current workflow run.
@@ -96,8 +96,8 @@ export class Client implements ArtifactClient {
     name: string,
     files: string[],
     rootDirectory: string,
-    options?: UploadOptions
-  ): Promise<UploadResponse> {
+    options?: UploadArtifactOptions
+  ): Promise<UploadArtifactResponse> {
     if (isGhes()) {
       warning(
         `@actions/artifact v2.0.0+ and upload-artifact@v4+ are not currently supported on GHES.`
@@ -56,12 +56,9 @@ export async function getArtifactPublic(
 
   let artifact = getArtifactResp.data.artifacts[0]
   if (getArtifactResp.data.artifacts.length > 1) {
-    artifact = getArtifactResp.data.artifacts.reduce((prev, current) => {
-      new Date(prev.created_at) > new Date(current.created_at) ? prev : current
-    })
-
+    artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]
     core.debug(
-      `more than one artifact found for a single name, returning newest (id: ${artifact.id})`
+      `More than one artifact found for a single name, returning newest (id: ${artifact.id})`
     )
   }
 
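The hunk above swaps the date-based reduce for a descending sort on the numeric artifact id, on the premise that ids are assigned in creation order, so the largest id is the newest upload. A standalone sketch of that selection rule, with hypothetical values:

// Hypothetical REST payload entries; only id matters for the selection.
const candidates = [
  {id: 11, name: 'my-artifact', created_at: '2023-12-01T00:00:00Z'},
  {id: 42, name: 'my-artifact', created_at: '2023-12-02T00:00:00Z'}
]
const newest = candidates.sort((a, b) => b.id - a.id)[0]
console.log(newest.id) // 42 -- the most recently created artifact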
@@ -101,11 +98,9 @@ export async function getArtifactInternal(
 
   let artifact = res.artifacts[0]
   if (res.artifacts.length > 1) {
-    artifact = res.artifacts.reduce((prev, current) => {
-      const prevDate = Timestamp.toDate(prev.createdAt || Timestamp.now())
-      const currentDate = Timestamp.toDate(current.createdAt || Timestamp.now())
-      return prevDate > currentDate ? prev : current
-    })
+    artifact = res.artifacts.sort(
+      (a, b) => Number(b.databaseId) - Number(a.databaseId)
+    )[0]
 
     core.debug(
       `more than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
@@ -152,19 +152,7 @@ export async function listArtifactsInternal(
  * @returns The filtered list of artifacts
  */
 function filterLatest(artifacts: Artifact[]): Artifact[] {
-  artifacts.sort((a, b) => {
-    if (!a.createdAt && !b.createdAt) {
-      return 0
-    }
-    if (!a.createdAt) {
-      return -1
-    }
-    if (!b.createdAt) {
-      return 1
-    }
-    return b.createdAt.getTime() - a.createdAt.getTime()
-  })
-
+  artifacts.sort((a, b) => b.id - a.id)
   const latestArtifacts: Artifact[] = []
   const seenArtifactNames = new Set<string>()
   for (const artifact of artifacts) {
@@ -173,6 +161,5 @@ function filterLatest(artifacts: Artifact[]): Artifact[] {
       seenArtifactNames.add(artifact.name)
     }
   }
-
   return latestArtifacts
 }
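Taken together, the two filterLatest hunks reduce the function to: sort by id descending, then keep the first occurrence of each name. A self-contained sketch of that behaviour; the Artifact shape is taken from the test fixtures above, and the if-guard inside the loop is reconstructed from context rather than shown in this diff.

interface Artifact {
  id: number
  name: string
  size: number
  createdAt?: Date
}

// Keeps only the highest-id (newest) artifact for each name.
function filterLatest(artifacts: Artifact[]): Artifact[] {
  artifacts.sort((a, b) => b.id - a.id)
  const latestArtifacts: Artifact[] = []
  const seenArtifactNames = new Set<string>()
  for (const artifact of artifacts) {
    if (!seenArtifactNames.has(artifact.name)) {
      latestArtifacts.push(artifact)
      seenArtifactNames.add(artifact.name)
    }
  }
  return latestArtifacts
}

// Hypothetical input: two 'logs' uploads and one 'coverage' upload.
const ids = filterLatest([
  {id: 1, name: 'logs', size: 10},
  {id: 2, name: 'logs', size: 12},
  {id: 3, name: 'coverage', size: 99}
]).map(a => a.id)
console.log(ids) // [3, 2]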
@@ -3,7 +3,7 @@
  *                              UploadArtifact                               *
  *                                                                           *
  *****************************************************************************/
-export interface UploadResponse {
+export interface UploadArtifactResponse {
   /**
    * Denotes if an artifact was successfully uploaded
    */
@@ -21,7 +21,7 @@ export interface UploadResponse {
   id?: number
 }
 
-export interface UploadOptions {
+export interface UploadArtifactOptions {
   /**
    * Duration after which artifact will expire in days.
    *
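The interface hunks rename UploadResponse and UploadOptions to UploadArtifactResponse and UploadArtifactOptions. A minimal sketch of a call site under the new names; only the uploadArtifact signature and the renamed types come from this diff, while the import path, the Client constructor usage, and the retentionDays field name are assumptions.

import {Client, UploadArtifactOptions, UploadArtifactResponse} from '@actions/artifact'

async function run(): Promise<void> {
  const client = new Client()

  // "Duration after which artifact will expire in days" per the doc comment above;
  // the exact field name used here is assumed.
  const options: UploadArtifactOptions = {retentionDays: 7}

  const response: UploadArtifactResponse = await client.uploadArtifact(
    'my-artifact',
    ['dist/report.txt'],
    'dist',
    options
  )

  console.log(`uploaded artifact id: ${response.id}`)
}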