mirror of https://github.com/actions/toolkit
Merge pull request #1591 from actions/robherley/artifact-internal-apis
Implement internal APIs for list/get/download artifacts (pull/1595/head)
commit 8f1c589e25
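For orientation before the diff: the sketch below is not part of the commit; it only restates how the new internal entry points fit together, based on the test code in this change. internalArtifactTwirpClient and its retry options, ListArtifacts with an idFilter, GetSignedArtifactURL, and downloadArtifactInternal all appear in the diff; the import paths and concrete values here are illustrative.

// Sketch only: composing the internal list/get/download APIs added in this PR.
import {internalArtifactTwirpClient} from './packages/artifact/src/internal/shared/artifact-twirp-client'
import {downloadArtifactInternal} from './packages/artifact/src/internal/download/download-artifact'

async function example(backendIds: {
  workflowRunBackendId: string
  workflowJobRunBackendId: string
}): Promise<void> {
  // Twirp client for the Results service; the retry options mirror the test values.
  const client = internalArtifactTwirpClient({
    maxAttempts: 5,
    retryIntervalMs: 1000,
    retryMultiplier: 1.5
  })

  // List artifacts for the current run, optionally filtered by database ID.
  const {artifacts} = await client.ListArtifacts({
    ...backendIds,
    idFilter: {value: '1234'}
  })

  // Resolve a signed blob-storage URL for one artifact by name.
  const {signedUrl} = await client.GetSignedArtifactURL({
    ...backendIds,
    name: artifacts[0].name
  })
  console.log(`artifact is at ${signedUrl}`)

  // Or let the higher-level helper do list + sign + download + extract in one call.
  await downloadArtifactInternal(1234, {path: '/tmp/my-artifact'})
}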
@@ -2,18 +2,14 @@ import * as http from 'http'
import * as net from 'net'
import {HttpClient} from '@actions/http-client'
import * as config from '../src/internal/shared/config'
import {createArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
import * as core from '@actions/core'
import {internalArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
import {noopLogs} from './common'

jest.mock('@actions/http-client')

describe('artifact-http-client', () => {
beforeAll(() => {
// mock all output so that there is less noise when running tests
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
noopLogs()
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('http://localhost:8080')

@@ -25,7 +21,7 @@ describe('artifact-http-client', () => {
})

it('should successfully create a client', () => {
const client = createArtifactTwirpClient('upload')
const client = internalArtifactTwirpClient()
expect(client).toBeDefined()
})

@@ -50,7 +46,7 @@ describe('artifact-http-client', () => {
}
})

const client = createArtifactTwirpClient('upload')
const client = internalArtifactTwirpClient()
const artifact = await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',

@@ -98,12 +94,11 @@ describe('artifact-http-client', () => {
}
})

const client = createArtifactTwirpClient(
'upload',
5, // retry 5 times
1, // wait 1 ms
1.5 // backoff factor
)
const client = internalArtifactTwirpClient({
maxAttempts: 5,
retryIntervalMs: 1,
retryMultiplier: 1.5
})
const artifact = await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',

@@ -138,12 +133,11 @@ describe('artifact-http-client', () => {
post: mockPost
}
})
const client = createArtifactTwirpClient(
'upload',
5, // retry 5 times
1, // wait 1 ms
1.5 // backoff factor
)
const client = internalArtifactTwirpClient({
maxAttempts: 5,
retryIntervalMs: 1,
retryMultiplier: 1.5
})
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',

@@ -178,12 +172,11 @@ describe('artifact-http-client', () => {
post: mockPost
}
})
const client = createArtifactTwirpClient(
'upload',
5, // retry 5 times
1, // wait 1 ms
1.5 // backoff factor
)
const client = internalArtifactTwirpClient({
maxAttempts: 5,
retryIntervalMs: 1,
retryMultiplier: 1.5
})
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',

@@ -0,0 +1,9 @@
import * as core from '@actions/core'

// noopLogs mocks the console.log and core.* functions to prevent output in the console while testing
export const noopLogs = (): void => {
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
}

@@ -2,14 +2,20 @@ import fs from 'fs'
import * as http from 'http'
import * as net from 'net'
import * as path from 'path'
import * as core from '@actions/core'
import * as github from '@actions/github'
import {HttpClient} from '@actions/http-client'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import archiver from 'archiver'

import {downloadArtifact} from '../src/internal/download/download-artifact'
import {
downloadArtifactInternal,
downloadArtifactPublic
} from '../src/internal/download/download-artifact'
import {getUserAgentString} from '../src/internal/shared/user-agent'
import {noopLogs} from './common'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as util from '../src/internal/shared/util'

type MockedDownloadArtifact = jest.MockedFunction<
RestEndpointMethods['actions']['downloadArtifact']

@@ -32,10 +38,16 @@ const fixtures = {
]
},
artifactID: 1234,
artifactName: 'my-artifact',
artifactSize: 123456,
repositoryOwner: 'actions',
repositoryName: 'toolkit',
token: 'ghp_1234567890',
blobStorageUrl: 'https://blob-storage.local?signed=true'
blobStorageUrl: 'https://blob-storage.local?signed=true',
backendIds: {
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
}
}

jest.mock('@actions/github', () => ({
@@ -74,206 +86,385 @@ const expectExtractedArchive = async (dir: string): Promise<void> => {
}
}

const setup = async (): Promise<void> => {
noopLogs()
await fs.promises.mkdir(testDir, {recursive: true})
await createTestArchive()

process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
}

const cleanup = async (): Promise<void> => {
jest.restoreAllMocks()
await fs.promises.rm(testDir, {recursive: true})
delete process.env['GITHUB_WORKSPACE']
}

const mockGetArtifactSuccess = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 200
message.push(fs.readFileSync(fixtures.exampleArtifact.path))
return {
message
}
})

const mockGetArtifactFailure = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 500
message.push('Internal Server Error')
return {
message
}
})

describe('download-artifact', () => {
beforeEach(async () => {
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
describe('public', () => {
beforeEach(setup)
afterEach(cleanup)

await fs.promises.mkdir(testDir, {recursive: true})
await createTestArchive()
it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})

process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)

const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
expectExtractedArchive(fixtures.workspaceDir)
expect(response.success).toBe(true)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
})

it('should successfully download an artifact to user defined path', async () => {
const customPath = path.join(testDir, 'custom')

const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})

const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)

const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token,
{
path: customPath
}
)

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
expectExtractedArchive(customPath)
expect(response.success).toBe(true)
expect(response.downloadPath).toBe(customPath)
})

it('should fail if download artifact API does not respond with location', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {},
status: 302,
url: '',
data: Buffer.from('')
})

await expect(
downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
})

it('should fail if blob storage response is non-200', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})

const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactFailure
}
}
)

await expect(
downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactFailure).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
})
})

afterEach(async () => {
jest.restoreAllMocks()
await fs.promises.rm(testDir, {recursive: true})
delete process.env['GITHUB_WORKSPACE']
})
describe('internal', () => {
beforeEach(async () => {
await setup()

it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest.actions
.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIds)

jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
})
afterEach(async () => {
await cleanup()
})

const getMock = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 200
message.push(fs.readFileSync(fixtures.exampleArtifact.path))
return {
message
}
})
const httpClientMock = (HttpClient as jest.Mock).mockImplementation(() => {
return {
get: getMock
}
it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
const mockListArtifacts = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifactID.toString(),
name: fixtures.artifactName,
size: fixtures.artifactSize.toString()
}
]
})

const mockGetSignedArtifactURL = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
.mockReturnValue(
Promise.resolve({
signedUrl: fixtures.blobStorageUrl
})
)

const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)

const response = await downloadArtifactInternal(fixtures.artifactID)

expectExtractedArchive(fixtures.workspaceDir)
expect(response.success).toBe(true)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockListArtifacts).toHaveBeenCalledWith({
idFilter: {
value: fixtures.artifactID.toString()
},
...fixtures.backendIds
})
expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
...fixtures.backendIds,
name: fixtures.artifactName
})
})

const response = await downloadArtifact(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
it('should successfully download an artifact to user defined path', async () => {
const customPath = path.join(testDir, 'custom')

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(httpClientMock).toHaveBeenCalledWith(getUserAgentString())
expect(getMock).toHaveBeenCalledWith(fixtures.blobStorageUrl)
const mockListArtifacts = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifactID.toString(),
name: fixtures.artifactName,
size: fixtures.artifactSize.toString()
}
]
})

expectExtractedArchive(fixtures.workspaceDir)
const mockGetSignedArtifactURL = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
.mockReturnValue(
Promise.resolve({
signedUrl: fixtures.blobStorageUrl
})
)

expect(response.success).toBe(true)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)

it('should successfully download an artifact to user defined path', async () => {
const customPath = path.join(testDir, 'custom')

const downloadArtifactMock = github.getOctokit(fixtures.token).rest.actions
.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})

const getMock = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 200
message.push(fs.readFileSync(fixtures.exampleArtifact.path))
return {
message
}
})
const httpClientMock = (HttpClient as jest.Mock).mockImplementation(() => {
return {
get: getMock
}
})

const response = await downloadArtifact(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token,
{
const response = await downloadArtifactInternal(fixtures.artifactID, {
path: customPath
}
)
})

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(httpClientMock).toHaveBeenCalledWith(getUserAgentString())
expect(getMock).toHaveBeenCalledWith(fixtures.blobStorageUrl)

expectExtractedArchive(customPath)

expect(response.success).toBe(true)
expect(response.downloadPath).toBe(customPath)
})

it('should fail if download artifact API does not respond with location', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest.actions
.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {},
status: 302,
url: '',
data: Buffer.from('')
expectExtractedArchive(customPath)
expect(response.success).toBe(true)
expect(response.downloadPath).toBe(customPath)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockListArtifacts).toHaveBeenCalledWith({
idFilter: {
value: fixtures.artifactID.toString()
},
...fixtures.backendIds
})
expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
...fixtures.backendIds,
name: fixtures.artifactName
})
})

await expect(
downloadArtifact(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
it('should fail if download artifact API does not respond with location', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockRejectedValue(new Error('boom'))

await expect(
downloadArtifactInternal(fixtures.artifactID)
).rejects.toBeInstanceOf(Error)
})

it('should fail if blob storage response is non-200', async () => {
const mockListArtifacts = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifactID.toString(),
name: fixtures.artifactName,
size: fixtures.artifactSize.toString()
}
]
})

const mockGetSignedArtifactURL = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
.mockReturnValue(
Promise.resolve({
signedUrl: fixtures.blobStorageUrl
})
)

const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactFailure
}
}
)
).rejects.toBeInstanceOf(Error)

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
await expect(
downloadArtifactInternal(fixtures.artifactID)
).rejects.toBeInstanceOf(Error)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockListArtifacts).toHaveBeenCalledWith({
idFilter: {
value: fixtures.artifactID.toString()
},
...fixtures.backendIds
})
expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
...fixtures.backendIds,
name: fixtures.artifactName
})
})
})

it('should fail if blob storage response is non-200', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest.actions
.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})

const getMock = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 500
message.push('Internal Server Error')
return {
message
}
})
const httpClientMock = (HttpClient as jest.Mock).mockImplementation(() => {
return {
get: getMock
}
})

await expect(
downloadArtifact(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)

expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(httpClientMock).toHaveBeenCalledWith(getUserAgentString())
expect(getMock).toHaveBeenCalledWith(fixtures.blobStorageUrl)
})
})

@@ -3,15 +3,11 @@ import {
validateFilePath
} from '../src/internal/upload/path-and-artifact-name-validation'

import * as core from '@actions/core'
import {noopLogs} from './common'

describe('Path and artifact name validation', () => {
beforeAll(() => {
// mock all output so that there is less noise when running tests
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
noopLogs()
})

it('Check Artifact Name for any invalid characters', () => {

@@ -1,4 +1,3 @@
import * as core from '@actions/core'
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'

@@ -7,14 +6,11 @@ import * as config from '../src/internal/shared/config'
import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'

describe('upload-artifact', () => {
beforeEach(() => {
// mock all output so that there is less noise when running tests
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
noopLogs()
})

afterEach(() => {

@@ -131,7 +127,7 @@ describe('upload-artifact', () => {
expect(uploadResp).resolves.toEqual({success: false})
})

it('should return false if no backend IDs are found', () => {
it('should reject if no backend IDs are found', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()

@@ -155,9 +151,6 @@ describe('upload-artifact', () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue({workflowRunBackendId: '', workflowJobRunBackendId: ''})

const uploadResp = uploadArtifact(
'test-artifact',

@@ -169,7 +162,7 @@ describe('upload-artifact', () => {
'/home/user/files/plz-upload'
)

expect(uploadResp).resolves.toEqual({success: false})
expect(uploadResp).rejects.toThrow()
})

it('should return false if the creation request fails', () => {

@@ -1,11 +1,11 @@
import * as io from '../../io/src/io'
import * as path from 'path'
import {promises as fs} from 'fs'
import * as core from '@actions/core'
import {
getUploadZipSpecification,
validateRootDirectory
} from '../src/internal/upload/upload-zip-specification'
import {noopLogs} from './common'

const root = path.join(__dirname, '_temp', 'upload-specification')
const goodItem1Path = path.join(

@@ -51,11 +51,7 @@ const artifactFilesToUpload = [

describe('Search', () => {
beforeAll(async () => {
// mock all output so that there is less noise when running tests
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
noopLogs()

// clear temp directory
await io.rmRF(root)

@@ -1,13 +1,12 @@
import * as config from '../src/internal/shared/config'
import * as util from '../src/internal/shared/util'

export const testRuntimeToken =
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'

describe('get-backend-ids-from-token', () => {
it('should return backend ids when the token is valid', () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'
)
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(testRuntimeToken)

const backendIds = util.getBackendIdsFromToken()
expect(backendIds.workflowRunBackendId).toBe(
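The assertion cut off above checks the two IDs embedded in testRuntimeToken. As a rough sketch of what getBackendIdsFromToken has to do (the real implementation lives in ../src/internal/shared/util and may differ in its details), the backend IDs come from the Actions.Results scope inside the JWT payload:

// Illustrative only: extracting backend IDs from a runtime token such as testRuntimeToken.
function backendIdsFromScope(runtimeToken: string): {
  workflowRunBackendId: string
  workflowJobRunBackendId: string
} {
  // The JWT payload is the second dot-separated, base64url-encoded segment.
  const payload = JSON.parse(
    Buffer.from(runtimeToken.split('.')[1], 'base64url').toString('utf8')
  )
  // scp is a space-separated scope list; the relevant entry looks like
  // "Actions.Results:<workflowRunBackendId>:<workflowJobRunBackendId>".
  const results = (payload.scp as string)
    .split(' ')
    .find(scope => scope.startsWith('Actions.Results:'))
  if (!results) {
    throw new Error('no Actions.Results scope in runtime token')
  }
  const [, workflowRunBackendId, workflowJobRunBackendId] = results.split(':')
  return {workflowRunBackendId, workflowJobRunBackendId}
}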
@@ -30,7 +30,7 @@
},
"scripts": {
"audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
"test": "echo \"Error: run tests from root\" && exit 1",
"test": "cd ../../ && npm run test ./packages/artifact",
"bootstrap": "cd ../../ && npm run bootstrap",
"tsc-run": "tsc",
"tsc": "npm run bootstrap && npm run tsc-run"

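The remaining files are generated protobuf and Twirp code. For orientation, each method on the generated JSON client boils down to a single HTTP POST. The sketch below is not from the commit; it assumes conventional Twirp routing (POST <base>/twirp/<package>.<Service>/<Method> with a JSON body), while the real base URL, route prefix, and auth header come from this package's runtime configuration.

// Sketch only: the wire-level call behind ArtifactServiceClientJSON.ListArtifacts.
import {HttpClient} from '@actions/http-client'

async function listArtifactsRaw(
  baseUrl: string,
  token: string,
  body: {workflow_run_backend_id: string; workflow_job_run_backend_id: string}
): Promise<unknown> {
  const client = new HttpClient('@actions/artifact-example')
  const res = await client.post(
    `${baseUrl}/twirp/github.actions.results.api.v1.ArtifactService/ListArtifacts`,
    JSON.stringify(body),
    {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`
    }
  )
  // Twirp JSON responses (and errors) come back as plain JSON bodies.
  return JSON.parse(await res.readBody())
}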
@@ -12,6 +12,7 @@ import type { PartialMessage } from "@protobuf-ts/runtime";
|
|||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { Int64Value } from "../../../google/protobuf/wrappers";
|
||||
import { StringValue } from "../../../google/protobuf/wrappers";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
|
@@ -90,6 +91,105 @@ export interface FinalizeArtifactResponse {
|
|||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export interface ListArtifactsRequest {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Name of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
|
||||
*/
|
||||
nameFilter?: StringValue; // optional
|
||||
/**
|
||||
* Monolith Database ID of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
|
||||
*/
|
||||
idFilter?: Int64Value; // optional
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
export interface ListArtifactsResponse {
|
||||
/**
|
||||
* @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
|
||||
*/
|
||||
artifacts: ListArtifactsResponse_MonolithArtifact[];
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
export interface ListArtifactsResponse_MonolithArtifact {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Monolith database ID of the artifact
|
||||
*
|
||||
* @generated from protobuf field: int64 database_id = 3;
|
||||
*/
|
||||
databaseId: string;
|
||||
/**
|
||||
* Name of the artifact
|
||||
*
|
||||
* @generated from protobuf field: string name = 4;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* Size of the artifact in bytes
|
||||
*
|
||||
* @generated from protobuf field: int64 size = 5;
|
||||
*/
|
||||
size: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
export interface GetSignedArtifactURLRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
export interface GetSignedArtifactURLResponse {
|
||||
/**
|
||||
* @generated from protobuf field: string signed_url = 1;
|
||||
*/
|
||||
signedUrl: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
|
||||
constructor() {
|
||||
|
@@ -348,10 +448,310 @@ class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse
|
|||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
export const FinalizeArtifactResponse = new FinalizeArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name_filter", kind: "message", T: () => StringValue },
|
||||
{ no: 4, name: "id_filter", kind: "message", T: () => Int64Value }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ListArtifactsRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.StringValue name_filter */ 3:
|
||||
message.nameFilter = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter);
|
||||
break;
|
||||
case /* google.protobuf.Int64Value id_filter */ 4:
|
||||
message.idFilter = Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* google.protobuf.StringValue name_filter = 3; */
|
||||
if (message.nameFilter)
|
||||
StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.Int64Value id_filter = 4; */
|
||||
if (message.idFilter)
|
||||
Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export const ListArtifactsRequest = new ListArtifactsRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse", [
|
||||
{ no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse {
|
||||
const message = { artifacts: [] };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ListArtifactsResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ 1:
|
||||
message.artifacts.push(ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1; */
|
||||
for (let i = 0; i < message.artifacts.length; i++)
|
||||
ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
export const ListArtifactsResponse = new ListArtifactsResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ListArtifactsResponse_MonolithArtifact>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* int64 database_id */ 3:
|
||||
message.databaseId = reader.int64().toString();
|
||||
break;
|
||||
case /* string name */ 4:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 5:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* int64 database_id = 3; */
|
||||
if (message.databaseId !== "0")
|
||||
writer.tag(3, WireType.Varint).int64(message.databaseId);
|
||||
/* string name = 4; */
|
||||
if (message.name !== "")
|
||||
writer.tag(4, WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 5; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(5, WireType.Varint).int64(message.size);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
export const ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<GetSignedArtifactURLRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.name);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
export const GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [
|
||||
{ no: 1, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse {
|
||||
const message = { signedUrl: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<GetSignedArtifactURLResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string signed_url */ 1:
|
||||
message.signedUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string signed_url = 1; */
|
||||
if (message.signedUrl !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.signedUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
export const GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
|
||||
*/
|
||||
export const ArtifactService = new ServiceType("github.actions.results.api.v1.ArtifactService", [
|
||||
{ name: "CreateArtifact", options: {}, I: CreateArtifactRequest, O: CreateArtifactResponse },
|
||||
{ name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse }
|
||||
{ name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
|
||||
{ name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
|
||||
{ name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse }
|
||||
]);
|
||||
|
|
|
@@ -6,14 +6,18 @@ import {
|
|||
TwirpErrorCode,
|
||||
Interceptor,
|
||||
TwirpContentType,
|
||||
chainInterceptors
|
||||
} from 'twirp-ts'
|
||||
chainInterceptors,
|
||||
} from "twirp-ts";
|
||||
import {
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
} from './artifact'
|
||||
FinalizeArtifactResponse,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse,
|
||||
} from "./artifact";
|
||||
|
||||
//==================================//
|
||||
// Client Code //
|
||||
|
@@ -23,43 +27,51 @@ interface Rpc {
|
|||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: 'application/json' | 'application/protobuf',
|
||||
contentType: "application/json" | "application/protobuf",
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>
|
||||
): Promise<object | Uint8Array>;
|
||||
}
|
||||
|
||||
export interface ArtifactServiceClient {
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc
|
||||
this.CreateArtifact.bind(this)
|
||||
this.FinalizeArtifact.bind(this)
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false
|
||||
})
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
'github.actions.results.api.v1.ArtifactService',
|
||||
'CreateArtifact',
|
||||
'application/json',
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
)
|
||||
return promise.then(data =>
|
||||
CreateArtifactResponse.fromJson(data as any, {ignoreUnknownFields: true})
|
||||
)
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
|
@@ -67,57 +79,123 @@ export class ArtifactServiceClientJSON implements ArtifactServiceClient {
|
|||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false
|
||||
})
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
'github.actions.results.api.v1.ArtifactService',
|
||||
'FinalizeArtifact',
|
||||
'application/json',
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
)
|
||||
return promise.then(data =>
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc
|
||||
this.CreateArtifact.bind(this)
|
||||
this.FinalizeArtifact.bind(this)
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toBinary(request)
|
||||
const data = CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
'github.actions.results.api.v1.ArtifactService',
|
||||
'CreateArtifact',
|
||||
'application/protobuf',
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
)
|
||||
return promise.then(data =>
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromBinary(data as Uint8Array)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toBinary(request)
|
||||
const data = FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
'github.actions.results.api.v1.ArtifactService',
|
||||
'FinalizeArtifact',
|
||||
'application/protobuf',
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
)
|
||||
return promise.then(data =>
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
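For orientation, a minimal sketch (not part of the PR) of how the generated clients above are driven: anything that implements the Rpc request() contract can back them, which is how the JSON client is later wired to the ArtifactHttpClient. The fake transport and its canned response below are illustrative stand-ins only.

// Hypothetical in-memory transport satisfying the Rpc interface used by the clients.
const fakeRpc = {
  request: async (
    service: string,
    method: string,
    contentType: string,
    data: object | Uint8Array
  ): Promise<object> => {
    console.log(`${service}/${method} (${contentType})`, data)
    return {artifacts: []} // canned JSON payload for illustration
  }
}

const sketchClient = new ArtifactServiceClientJSON(fakeRpc)
sketchClient
  .ListArtifacts({workflowRunBackendId: 'run-id', workflowJobRunBackendId: 'job-id'})
  .then(res => console.log(`found ${res.artifacts.length} artifact(s)`))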
@ -129,33 +207,45 @@ export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
|
|||
CreateArtifact(
|
||||
ctx: T,
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
ctx: T,
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(
|
||||
ctx: T,
|
||||
request: ListArtifactsRequest
|
||||
): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
ctx: T,
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
}
|
||||
|
||||
export enum ArtifactServiceMethod {
|
||||
CreateArtifact = 'CreateArtifact',
|
||||
FinalizeArtifact = 'FinalizeArtifact'
|
||||
CreateArtifact = "CreateArtifact",
|
||||
FinalizeArtifact = "FinalizeArtifact",
|
||||
ListArtifacts = "ListArtifacts",
|
||||
GetSignedArtifactURL = "GetSignedArtifactURL",
|
||||
}
|
||||
|
||||
export const ArtifactServiceMethodList = [
|
||||
ArtifactServiceMethod.CreateArtifact,
|
||||
ArtifactServiceMethod.FinalizeArtifact
|
||||
]
|
||||
ArtifactServiceMethod.FinalizeArtifact,
|
||||
ArtifactServiceMethod.ListArtifacts,
|
||||
ArtifactServiceMethod.GetSignedArtifactURL,
|
||||
];
|
||||
|
||||
export function createArtifactServiceServer<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(service: ArtifactServiceTwirp<T>) {
|
||||
return new TwirpServer<ArtifactServiceTwirp, T>({
|
||||
service,
|
||||
packageName: 'github.actions.results.api.v1',
|
||||
serviceName: 'ArtifactService',
|
||||
packageName: "github.actions.results.api.v1",
|
||||
serviceName: "ArtifactService",
|
||||
methodList: ArtifactServiceMethodList,
|
||||
matchRoute: matchArtifactServiceRoute
|
||||
})
|
||||
matchRoute: matchArtifactServiceRoute,
|
||||
});
|
||||
}
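A hedged sketch of the server side: an ArtifactServiceTwirp implementation handed to createArtifactServiceServer. The partial service object and the httpHandler() mounting are assumptions for illustration (httpHandler() being the usual twirp-ts TwirpServer entry point), not something this PR adds.

import {createServer} from 'http'

// Hypothetical, partial service implementation; the remaining methods follow the
// same (ctx, request) => Promise<response> shape and are omitted for brevity.
const sketchService = {
  async ListArtifacts(): Promise<ListArtifactsResponse> {
    return {artifacts: []}
  }
} as unknown as ArtifactServiceTwirp

const twirpServer = createArtifactServiceServer(sketchService)
// Assumption: the returned twirp-ts TwirpServer exposes httpHandler() for
// mounting on a plain Node HTTP server.
createServer(twirpServer.httpHandler()).listen(8080)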
|
||||
|
||||
function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
|
||||
|
@ -163,7 +253,7 @@ function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
|
|||
events: RouterEvents<T>
|
||||
) {
|
||||
switch (method) {
|
||||
case 'CreateArtifact':
|
||||
case "CreateArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
|
@ -174,16 +264,16 @@ function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
|
|||
CreateArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = {...ctx, methodName: 'CreateArtifact'}
|
||||
await events.onMatch(ctx)
|
||||
ctx = { ...ctx, methodName: "CreateArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceCreateArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
)
|
||||
}
|
||||
case 'FinalizeArtifact':
|
||||
);
|
||||
};
|
||||
case "FinalizeArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
|
@ -194,19 +284,59 @@ function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
|
|||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = {...ctx, methodName: 'FinalizeArtifact'}
|
||||
await events.onMatch(ctx)
|
||||
ctx = { ...ctx, methodName: "FinalizeArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceFinalizeArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
)
|
||||
}
|
||||
);
|
||||
};
|
||||
case "ListArtifacts":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "ListArtifacts" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceListArtifactsRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "GetSignedArtifactURL":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "GetSignedArtifactURL" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceGetSignedArtifactURLRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
default:
|
||||
events.onNotFound()
|
||||
const msg = `no handler found`
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg)
|
||||
events.onNotFound();
|
||||
const msg = `no handler found`;
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -225,17 +355,17 @@ function handleArtifactServiceCreateArtifactRequest<
|
|||
service,
|
||||
data,
|
||||
interceptors
|
||||
)
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceCreateArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
)
|
||||
);
|
||||
default:
|
||||
const msg = 'unexpected Content-Type'
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg)
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -258,17 +388,79 @@ function handleArtifactServiceFinalizeArtifactRequest<
|
|||
service,
|
||||
data,
|
||||
interceptors
|
||||
)
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceFinalizeArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
)
|
||||
);
|
||||
default:
|
||||
const msg = 'unexpected Content-Type'
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg)
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceListArtifactsRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceListArtifactsJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceListArtifactsProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceGetSignedArtifactURLRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceGetSignedArtifactURLJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceGetSignedArtifactURLProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
async function handleArtifactServiceCreateArtifactJSON<
|
||||
|
@ -279,16 +471,18 @@ async function handleArtifactServiceCreateArtifactJSON<
|
|||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
) {
|
||||
let request: CreateArtifactRequest
|
||||
let response: CreateArtifactResponse
|
||||
let request: CreateArtifactRequest;
|
||||
let response: CreateArtifactResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || '{}')
|
||||
request = CreateArtifactRequest.fromJson(body, {ignoreUnknownFields: true})
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = CreateArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the json request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -297,20 +491,20 @@ async function handleArtifactServiceCreateArtifactJSON<
|
|||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq)
|
||||
})
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.CreateArtifact(ctx, request!)
|
||||
response = await service.CreateArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
CreateArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceFinalizeArtifactJSON<
|
||||
|
@ -325,18 +519,18 @@ async function handleArtifactServiceFinalizeArtifactJSON<
|
|||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) {
|
||||
let request: FinalizeArtifactRequest
|
||||
let response: FinalizeArtifactResponse
|
||||
let request: FinalizeArtifactRequest;
|
||||
let response: FinalizeArtifactResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || '{}')
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = FinalizeArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true
|
||||
})
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the json request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -345,20 +539,112 @@ async function handleArtifactServiceFinalizeArtifactJSON<
|
|||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq)
|
||||
})
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.FinalizeArtifact(ctx, request!)
|
||||
response = await service.FinalizeArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
FinalizeArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceListArtifactsJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
) {
|
||||
let request: ListArtifactsRequest;
|
||||
let response: ListArtifactsResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = ListArtifactsRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.ListArtifacts(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
ListArtifactsResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceGetSignedArtifactURLJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) {
|
||||
let request: GetSignedArtifactURLRequest;
|
||||
let response: GetSignedArtifactURLResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = GetSignedArtifactURLRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.GetSignedArtifactURL(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
GetSignedArtifactURLResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
async function handleArtifactServiceCreateArtifactProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
|
@ -368,15 +654,15 @@ async function handleArtifactServiceCreateArtifactProtobuf<
|
|||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
) {
|
||||
let request: CreateArtifactRequest
|
||||
let response: CreateArtifactResponse
|
||||
let request: CreateArtifactRequest;
|
||||
let response: CreateArtifactResponse;
|
||||
|
||||
try {
|
||||
request = CreateArtifactRequest.fromBinary(data)
|
||||
request = CreateArtifactRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the protobuf request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -385,15 +671,15 @@ async function handleArtifactServiceCreateArtifactProtobuf<
|
|||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq)
|
||||
})
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.CreateArtifact(ctx, request!)
|
||||
response = await service.CreateArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(CreateArtifactResponse.toBinary(response))
|
||||
return Buffer.from(CreateArtifactResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceFinalizeArtifactProtobuf<
|
||||
|
@ -408,15 +694,15 @@ async function handleArtifactServiceFinalizeArtifactProtobuf<
|
|||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) {
|
||||
let request: FinalizeArtifactRequest
|
||||
let response: FinalizeArtifactResponse
|
||||
let request: FinalizeArtifactRequest;
|
||||
let response: FinalizeArtifactResponse;
|
||||
|
||||
try {
|
||||
request = FinalizeArtifactRequest.fromBinary(data)
|
||||
request = FinalizeArtifactRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the protobuf request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -425,13 +711,89 @@ async function handleArtifactServiceFinalizeArtifactProtobuf<
|
|||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq)
|
||||
})
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.FinalizeArtifact(ctx, request!)
|
||||
response = await service.FinalizeArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(FinalizeArtifactResponse.toBinary(response))
|
||||
return Buffer.from(FinalizeArtifactResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceListArtifactsProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
) {
|
||||
let request: ListArtifactsRequest;
|
||||
let response: ListArtifactsResponse;
|
||||
|
||||
try {
|
||||
request = ListArtifactsRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.ListArtifacts(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(ListArtifactsResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceGetSignedArtifactURLProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) {
|
||||
let request: GetSignedArtifactURLRequest;
|
||||
let response: GetSignedArtifactURLResponse;
|
||||
|
||||
try {
|
||||
request = GetSignedArtifactURLRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.GetSignedArtifactURL(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(GetSignedArtifactURLResponse.toBinary(response));
|
||||
}
|
||||
|
|
|
@ -6,12 +6,16 @@ import {
|
|||
DownloadArtifactOptions,
|
||||
GetArtifactResponse,
|
||||
ListArtifactsResponse,
|
||||
DownloadArtifactResponse
|
||||
DownloadArtifactResponse,
|
||||
FindOptions
|
||||
} from './shared/interfaces'
|
||||
import {uploadArtifact} from './upload/upload-artifact'
|
||||
import {downloadArtifact} from './download/download-artifact'
|
||||
import {getArtifact} from './find/get-artifact'
|
||||
import {listArtifacts} from './find/list-artifacts'
|
||||
import {
|
||||
downloadArtifactPublic,
|
||||
downloadArtifactInternal
|
||||
} from './download/download-artifact'
|
||||
import {getArtifactPublic, getArtifactInternal} from './find/get-artifact'
|
||||
import {listArtifactsPublic, listArtifactsInternal} from './find/list-artifacts'
|
||||
|
||||
export interface ArtifactClient {
|
||||
/**
|
||||
|
@ -31,62 +35,46 @@ export interface ArtifactClient {
|
|||
): Promise<UploadResponse>
|
||||
|
||||
/**
|
||||
* Lists all artifacts that are part of a workflow run.
|
||||
* Lists all artifacts that are part of the current workflow run.
|
||||
* This function will return at most 1000 artifacts per workflow run.
|
||||
*
|
||||
* This calls the public List-Artifacts API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
|
||||
* Due to paginated responses from the public API, this function will return at most 1000 artifacts per workflow run (100 per page * a maximum of 10 calls)
|
||||
* If options.findBy is specified, this will call the public List-Artifacts API which can list from other runs.
|
||||
* https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
|
||||
*
|
||||
* @param workflowRunId The workflow run id that the artifact belongs to
|
||||
* @param repositoryOwner The owner of the repository that the artifact belongs to
|
||||
* @param repositoryName The name of the repository that the artifact belongs to
|
||||
* @param token A token with the appropriate permission to the repository to list artifacts
|
||||
* @param options Extra options that allow for the customization of the list behavior
|
||||
* @returns ListArtifactResponse object
|
||||
*/
|
||||
listArtifacts(
|
||||
workflowRunId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string
|
||||
): Promise<ListArtifactsResponse>
|
||||
listArtifacts(options?: FindOptions): Promise<ListArtifactsResponse>
|
||||
|
||||
/**
|
||||
* Finds an artifact by name given a repository and workflow run id.
|
||||
* Finds an artifact by name.
|
||||
*
|
||||
* This calls the public List-Artifacts API with a name filter https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
|
||||
* If options.findBy is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
|
||||
* https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
|
||||
* @actions/artifact > 2.0.0 does not allow for creating multiple artifacts with the same name in the same workflow run.
|
||||
* It is possible to have multiple artifacts with the same name in the same workflow run by using old versions of upload-artifact (v1,v2 and v3) or @actions/artifact < v2.0.0
|
||||
* It is possible to have multiple artifacts with the same name in the same workflow run when using old versions of upload-artifact (v1, v2 and v3), @actions/artifact < v2.0.0, or when the workflow run is a rerun.
|
||||
* If there are multiple artifacts with the same name in the same workflow run, this function will return the first artifact that matches the name.
|
||||
*
|
||||
* @param artifactName The name of the artifact to find
|
||||
* @param workflowRunId The workflow run id that the artifact belongs to
|
||||
* @param repositoryOwner The owner of the repository that the artifact belongs to
|
||||
* @param repositoryName The name of the repository that the artifact belongs to
|
||||
* @param token A token with the appropriate permission to the repository to find the artifact
|
||||
* @param options Extra options that allow for the customization of the get behavior
|
||||
*/
|
||||
getArtifact(
|
||||
artifactName: string,
|
||||
workflowRunId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string
|
||||
options?: FindOptions
|
||||
): Promise<GetArtifactResponse>
|
||||
|
||||
/**
|
||||
* Downloads an artifact and unzips the content
|
||||
* Downloads an artifact and unzips the content.
|
||||
*
|
||||
* If options.findBy is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
|
||||
*
|
||||
* @param artifactId The ID of the artifact to download
|
||||
* @param repositoryOwner The owner of the repository that the artifact belongs to
|
||||
* @param repositoryName The name of the repository that the artifact belongs to
|
||||
* @param token A token with the appropriate permission to the repository to download the artifact
|
||||
* @param options Extra options that allow for the customization of the download behavior
|
||||
* @returns single DownloadArtifactResponse object
|
||||
*/
|
||||
downloadArtifact(
|
||||
artifactId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string,
|
||||
options?: DownloadArtifactOptions
|
||||
options?: DownloadArtifactOptions & FindOptions
|
||||
): Promise<DownloadArtifactResponse>
|
||||
}
|
||||
|
||||
|
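To make the new surface concrete, a hedged usage sketch of the reshaped ArtifactClient: with no options everything is resolved from the runtime token, while options.findBy switches to the public REST APIs. Instantiating Client directly follows the class shown below; the token and identifiers are placeholders.

const client = new Client()

async function example(): Promise<void> {
  // Same-run lookup: backend IDs come from the runtime token, no options needed.
  const current = await client.listArtifacts()
  console.log(`current run has ${current.artifacts.length} artifact(s)`)

  // Cross-run lookup: findBy requires a token with the actions:read scope.
  const found = await client.getArtifact('my-artifact', {
    findBy: {
      token: process.env.GITHUB_TOKEN ?? '',
      workflowRunId: 123456789,
      repositoryOwner: 'my-org',
      repositoryName: 'my-repo'
    }
  })

  if (found.success && found.artifact) {
    console.log(`found artifact ${found.artifact.name} (${found.artifact.size} bytes)`)
  }
}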
@ -105,7 +93,7 @@ export class Client implements ArtifactClient {
|
|||
name: string,
|
||||
files: string[],
|
||||
rootDirectory: string,
|
||||
options?: UploadOptions | undefined
|
||||
options?: UploadOptions
|
||||
): Promise<UploadResponse> {
|
||||
if (isGhes()) {
|
||||
warning(
|
||||
|
@ -137,10 +125,7 @@ If the error persists, please check whether Actions is operating normally at [ht
|
|||
*/
|
||||
async downloadArtifact(
|
||||
artifactId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string,
|
||||
options?: DownloadArtifactOptions
|
||||
options?: DownloadArtifactOptions & FindOptions
|
||||
): Promise<DownloadArtifactResponse> {
|
||||
if (isGhes()) {
|
||||
warning(
|
||||
|
@ -152,13 +137,22 @@ If the error persists, please check whether Actions is operating normally at [ht
|
|||
}
|
||||
|
||||
try {
|
||||
return downloadArtifact(
|
||||
artifactId,
|
||||
repositoryOwner,
|
||||
repositoryName,
|
||||
token,
|
||||
options
|
||||
)
|
||||
if (options?.findBy) {
|
||||
const {
|
||||
findBy: {repositoryOwner, repositoryName, token},
|
||||
...downloadOptions
|
||||
} = options
|
||||
|
||||
return downloadArtifactPublic(
|
||||
artifactId,
|
||||
repositoryOwner,
|
||||
repositoryName,
|
||||
token,
|
||||
downloadOptions
|
||||
)
|
||||
}
|
||||
|
||||
return downloadArtifactInternal(artifactId, options)
|
||||
} catch (error) {
|
||||
warning(
|
||||
`Artifact download failed with error: ${error}.
|
||||
|
@ -177,12 +171,7 @@ If the error persists, please check whether Actions and API requests are operati
|
|||
/**
|
||||
* List Artifacts
|
||||
*/
|
||||
async listArtifacts(
|
||||
workflowRunId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string
|
||||
): Promise<ListArtifactsResponse> {
|
||||
async listArtifacts(options?: FindOptions): Promise<ListArtifactsResponse> {
|
||||
if (isGhes()) {
|
||||
warning(
|
||||
`@actions/artifact v2.0.0+ and download-artifact@v4+ are not currently supported on GHES.`
|
||||
|
@ -193,12 +182,20 @@ If the error persists, please check whether Actions and API requests are operati
|
|||
}
|
||||
|
||||
try {
|
||||
return listArtifacts(
|
||||
workflowRunId,
|
||||
repositoryOwner,
|
||||
repositoryName,
|
||||
token
|
||||
)
|
||||
if (options?.findBy) {
|
||||
const {
|
||||
findBy: {workflowRunId, repositoryOwner, repositoryName, token}
|
||||
} = options
|
||||
|
||||
return listArtifactsPublic(
|
||||
workflowRunId,
|
||||
repositoryOwner,
|
||||
repositoryName,
|
||||
token
|
||||
)
|
||||
}
|
||||
|
||||
return listArtifactsInternal()
|
||||
} catch (error: unknown) {
|
||||
warning(
|
||||
`Listing Artifacts failed with error: ${error}.
|
||||
|
@ -219,10 +216,7 @@ If the error persists, please check whether Actions and API requests are operati
|
|||
*/
|
||||
async getArtifact(
|
||||
artifactName: string,
|
||||
workflowRunId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string
|
||||
options?: FindOptions
|
||||
): Promise<GetArtifactResponse> {
|
||||
if (isGhes()) {
|
||||
warning(
|
||||
|
@ -234,13 +228,21 @@ If the error persists, please check whether Actions and API requests are operati
|
|||
}
|
||||
|
||||
try {
|
||||
return getArtifact(
|
||||
artifactName,
|
||||
workflowRunId,
|
||||
repositoryOwner,
|
||||
repositoryName,
|
||||
token
|
||||
)
|
||||
if (options?.findBy) {
|
||||
const {
|
||||
findBy: {workflowRunId, repositoryOwner, repositoryName, token}
|
||||
} = options
|
||||
|
||||
return getArtifactPublic(
|
||||
artifactName,
|
||||
workflowRunId,
|
||||
repositoryOwner,
|
||||
repositoryName,
|
||||
token
|
||||
)
|
||||
}
|
||||
|
||||
return getArtifactInternal(artifactName)
|
||||
} catch (error: unknown) {
|
||||
warning(
|
||||
`Fetching Artifact failed with error: ${error}.
|
||||
|
|
|
@ -9,6 +9,13 @@ import {
|
|||
} from '../shared/interfaces'
|
||||
import {getUserAgentString} from '../shared/user-agent'
|
||||
import {getGitHubWorkspaceDir} from '../shared/config'
|
||||
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
|
||||
import {
|
||||
GetSignedArtifactURLRequest,
|
||||
Int64Value,
|
||||
ListArtifactsRequest
|
||||
} from '../../generated'
|
||||
import {getBackendIdsFromToken} from '../shared/util'
|
||||
|
||||
const scrubQueryParameters = (url: string): string => {
|
||||
const parsed = new URL(url)
|
||||
|
@ -42,23 +49,14 @@ async function streamExtract(url: string, directory: string): Promise<void> {
|
|||
return response.message.pipe(unzipper.Extract({path: directory})).promise()
|
||||
}
|
||||
|
||||
export async function downloadArtifact(
|
||||
export async function downloadArtifactPublic(
|
||||
artifactId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
token: string,
|
||||
options?: DownloadArtifactOptions
|
||||
): Promise<DownloadArtifactResponse> {
|
||||
const downloadPath = options?.path || getGitHubWorkspaceDir()
|
||||
|
||||
if (!(await exists(downloadPath))) {
|
||||
core.debug(
|
||||
`Artifact destination folder does not exist, creating: ${downloadPath}`
|
||||
)
|
||||
await fs.mkdir(downloadPath, {recursive: true})
|
||||
} else {
|
||||
core.debug(`Artifact destination folder already exists: ${downloadPath}`)
|
||||
}
|
||||
const downloadPath = await resolveOrCreateDirectory(options?.path)
|
||||
|
||||
const api = github.getOctokit(token)
|
||||
|
||||
|
@ -99,3 +97,71 @@ export async function downloadArtifact(
|
|||
|
||||
return {success: true, downloadPath}
|
||||
}
|
||||
|
||||
export async function downloadArtifactInternal(
|
||||
artifactId: number,
|
||||
options?: DownloadArtifactOptions
|
||||
): Promise<DownloadArtifactResponse> {
|
||||
const downloadPath = await resolveOrCreateDirectory(options?.path)
|
||||
|
||||
const artifactClient = internalArtifactTwirpClient()
|
||||
|
||||
const {workflowRunBackendId, workflowJobRunBackendId} =
|
||||
getBackendIdsFromToken()
|
||||
|
||||
const listReq: ListArtifactsRequest = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId,
|
||||
idFilter: Int64Value.create({value: artifactId.toString()})
|
||||
}
|
||||
|
||||
const {artifacts} = await artifactClient.ListArtifacts(listReq)
|
||||
|
||||
if (artifacts.length === 0) {
|
||||
core.warning(
|
||||
`No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`
|
||||
)
|
||||
return {success: false}
|
||||
}
|
||||
|
||||
if (artifacts.length > 1) {
|
||||
core.warning('Multiple artifacts found, defaulting to first.')
|
||||
}
|
||||
|
||||
const signedReq: GetSignedArtifactURLRequest = {
|
||||
workflowRunBackendId: artifacts[0].workflowRunBackendId,
|
||||
workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
|
||||
name: artifacts[0].name
|
||||
}
|
||||
|
||||
const {signedUrl} = await artifactClient.GetSignedArtifactURL(signedReq)
|
||||
|
||||
core.info(
|
||||
`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`
|
||||
)
|
||||
|
||||
try {
|
||||
core.info(`Starting download of artifact to: ${downloadPath}`)
|
||||
await streamExtract(signedUrl, downloadPath)
|
||||
core.info(`Artifact download completed successfully.`)
|
||||
} catch (error) {
|
||||
throw new Error(`Unable to download and extract artifact: ${error.message}`)
|
||||
}
|
||||
|
||||
return {success: true, downloadPath}
|
||||
}
|
||||
|
||||
async function resolveOrCreateDirectory(
|
||||
downloadPath = getGitHubWorkspaceDir()
|
||||
): Promise<string> {
|
||||
if (!(await exists(downloadPath))) {
|
||||
core.debug(
|
||||
`Artifact destination folder does not exist, creating: ${downloadPath}`
|
||||
)
|
||||
await fs.mkdir(downloadPath, {recursive: true})
|
||||
} else {
|
||||
core.debug(`Artifact destination folder already exists: ${downloadPath}`)
|
||||
}
|
||||
|
||||
return downloadPath
|
||||
}
|
||||
|
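A hedged usage sketch of the internal download path added above; it assumes the runtime token and results-service URL read by the shared config helpers are already present in the environment (as they are inside an Actions job), and the artifact ID and destination path are placeholders.

async function downloadFromThisRun(): Promise<void> {
  // 42 and the destination path are placeholder values for illustration.
  const result = await downloadArtifactInternal(42, {path: '/tmp/my-artifact'})
  if (result.success) {
    core.info(`Artifact extracted to ${result.downloadPath}`)
  } else {
    core.warning('Artifact could not be found for this run')
  }
}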
|
|
@ -1,14 +1,17 @@
|
|||
import {GetArtifactResponse} from '../shared/interfaces'
|
||||
import {getOctokit} from '@actions/github'
|
||||
import {getUserAgentString} from '../shared/user-agent'
|
||||
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
|
||||
import {getRetryOptions} from './retry-options'
|
||||
import {requestLog} from '@octokit/plugin-request-log'
|
||||
import {retry} from '@octokit/plugin-retry'
|
||||
import * as core from '@actions/core'
|
||||
import {OctokitOptions} from '@octokit/core/dist-types/types'
|
||||
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
|
||||
import {getRetryOptions} from './retry-options'
|
||||
import {requestLog} from '@octokit/plugin-request-log'
|
||||
import {GetArtifactResponse} from '../shared/interfaces'
|
||||
import {getBackendIdsFromToken} from '../shared/util'
|
||||
import {getUserAgentString} from '../shared/user-agent'
|
||||
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
|
||||
import {ListArtifactsRequest, StringValue} from '../../generated'
|
||||
|
||||
export async function getArtifact(
|
||||
export async function getArtifactPublic(
|
||||
artifactName: string,
|
||||
workflowRunId: number,
|
||||
repositoryOwner: string,
|
||||
|
@ -62,8 +65,54 @@ export async function getArtifact(
|
|||
artifact: {
|
||||
name: getArtifactResp.data.artifacts[0].name,
|
||||
id: getArtifactResp.data.artifacts[0].id,
|
||||
url: getArtifactResp.data.artifacts[0].url,
|
||||
size: getArtifactResp.data.artifacts[0].size_in_bytes
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function getArtifactInternal(
|
||||
artifactName: string
|
||||
): Promise<GetArtifactResponse> {
|
||||
const artifactClient = internalArtifactTwirpClient()
|
||||
|
||||
const {workflowRunBackendId, workflowJobRunBackendId} =
|
||||
getBackendIdsFromToken()
|
||||
|
||||
const req: ListArtifactsRequest = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId,
|
||||
nameFilter: StringValue.create({value: artifactName})
|
||||
}
|
||||
|
||||
const res = await artifactClient.ListArtifacts(req)
|
||||
|
||||
if (res.artifacts.length === 0) {
|
||||
core.warning('no artifacts found')
|
||||
return {
|
||||
success: false
|
||||
}
|
||||
}
|
||||
|
||||
if (res.artifacts.length > 1) {
|
||||
core.warning(
|
||||
'more than one artifact found for a single name, returning first'
|
||||
)
|
||||
}
|
||||
|
||||
// In the case of reruns, we may have artifacts with the same name scoped under the same workflow run.
|
||||
// Prefer the artifact most closely scoped to this run.
|
||||
// If it doesn't exist (e.g. partial rerun) we'll use the first match.
|
||||
const artifact =
|
||||
res.artifacts.find(
|
||||
artifact => artifact.workflowRunBackendId === workflowRunBackendId
|
||||
) || res.artifacts[0]
|
||||
|
||||
return {
|
||||
success: true,
|
||||
artifact: {
|
||||
name: artifact.name,
|
||||
id: Number(artifact.databaseId),
|
||||
size: Number(artifact.size)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,13 +7,16 @@ import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
|
|||
import {requestLog} from '@octokit/plugin-request-log'
|
||||
import {retry} from '@octokit/plugin-retry'
|
||||
import {OctokitOptions} from '@octokit/core/dist-types/types'
|
||||
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
|
||||
import {getBackendIdsFromToken} from '../shared/util'
|
||||
import {ListArtifactsRequest} from '../../generated'
|
||||
|
||||
// Limiting to 1000 for perf reasons
|
||||
const maximumArtifactCount = 1000
|
||||
const paginationCount = 100
|
||||
const maxNumberOfPages = maximumArtifactCount / paginationCount
|
||||
|
||||
export async function listArtifacts(
|
||||
export async function listArtifactsPublic(
|
||||
workflowRunId: number,
|
||||
repositoryOwner: string,
|
||||
repositoryName: string,
|
||||
|
@ -62,7 +65,6 @@ export async function listArtifacts(
|
|||
artifacts.push({
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
url: artifact.url,
|
||||
size: artifact.size_in_bytes
|
||||
})
|
||||
}
|
||||
|
@ -89,13 +91,37 @@ export async function listArtifacts(
|
|||
artifacts.push({
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
url: artifact.url,
|
||||
size: artifact.size_in_bytes
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
info(`Finished fetching artifact list`)
|
||||
info(`Found ${artifacts.length} artifact(s)`)
|
||||
|
||||
return {
|
||||
artifacts
|
||||
}
|
||||
}
|
||||
|
||||
export async function listArtifactsInternal(): Promise<ListArtifactsResponse> {
|
||||
const artifactClient = internalArtifactTwirpClient()
|
||||
|
||||
const {workflowRunBackendId, workflowJobRunBackendId} =
|
||||
getBackendIdsFromToken()
|
||||
|
||||
const req: ListArtifactsRequest = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId
|
||||
}
|
||||
|
||||
const res = await artifactClient.ListArtifacts(req)
|
||||
const artifacts = res.artifacts.map(artifact => ({
|
||||
name: artifact.name,
|
||||
id: Number(artifact.databaseId),
|
||||
size: Number(artifact.size)
|
||||
}))
|
||||
|
||||
info(`Found ${artifacts.length} artifact(s)`)
|
||||
|
||||
return {
|
||||
artifacts
|
||||
|
|
|
@ -3,6 +3,7 @@ import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
|
|||
import {info, debug} from '@actions/core'
|
||||
import {ArtifactServiceClientJSON} from '../../generated'
|
||||
import {getResultsServiceUrl, getRuntimeToken} from './config'
|
||||
import {getUserAgentString} from './user-agent'
|
||||
|
||||
// The twirp http client must implement this interface
|
||||
interface Rpc {
|
||||
|
@ -53,7 +54,7 @@ class ArtifactHttpClient implements Rpc {
|
|||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array> {
|
||||
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
|
||||
debug(`Requesting: ${url}`)
|
||||
debug(`[Request] ${method} ${url}`)
|
||||
const headers = {
|
||||
'Content-Type': contentType
|
||||
}
|
||||
|
@ -79,6 +80,8 @@ class ArtifactHttpClient implements Rpc {
|
|||
try {
|
||||
const response = await operation()
|
||||
const statusCode = response.message.statusCode
|
||||
debug(`[Response] ${response.message.statusCode}`)
|
||||
debug(JSON.stringify(response.message.headers, null, 2))
|
||||
|
||||
if (this.isSuccessStatusCode(statusCode)) {
|
||||
return response
|
||||
|
@ -157,17 +160,16 @@ class ArtifactHttpClient implements Rpc {
|
|||
}
|
||||
}
|
||||
|
||||
export function createArtifactTwirpClient(
|
||||
type: 'upload' | 'download',
|
||||
maxAttempts?: number,
|
||||
baseRetryIntervalMilliseconds?: number,
|
||||
export function internalArtifactTwirpClient(options?: {
|
||||
maxAttempts?: number
|
||||
retryIntervalMs?: number
|
||||
retryMultiplier?: number
|
||||
): ArtifactServiceClientJSON {
|
||||
}): ArtifactServiceClientJSON {
|
||||
const client = new ArtifactHttpClient(
|
||||
`@actions/artifact-${type}`,
|
||||
maxAttempts,
|
||||
baseRetryIntervalMilliseconds,
|
||||
retryMultiplier
|
||||
getUserAgentString(),
|
||||
options?.maxAttempts,
|
||||
options?.retryIntervalMs,
|
||||
options?.retryMultiplier
|
||||
)
|
||||
return new ArtifactServiceClientJSON(client)
|
||||
}
|
||||
|
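A hedged sketch of calling the reworked factory with its new options object; callers that need no tuning can simply call internalArtifactTwirpClient() with no arguments.

// All three retry knobs are optional; omitted fields fall back to the
// ArtifactHttpClient defaults. The values here are placeholders, not defaults.
const twirpClient = internalArtifactTwirpClient({
  maxAttempts: 3,
  retryIntervalMs: 2000,
  retryMultiplier: 2
})

twirpClient
  .ListArtifacts({workflowRunBackendId: 'run-id', workflowJobRunBackendId: 'job-id'})
  .then(res => debug(`listed ${res.artifacts.length} artifact(s)`))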
|
|
@ -120,13 +120,23 @@ export interface Artifact {
|
|||
*/
|
||||
id: number
|
||||
|
||||
/**
|
||||
* The URL of the artifact
|
||||
*/
|
||||
url: string
|
||||
|
||||
/**
|
||||
* The size of the artifact in bytes
|
||||
*/
|
||||
size: number
|
||||
}
|
||||
|
||||
// FindOptions are for fetching Artifact(s) out of the scope of the current run.
|
||||
// A PAT with the actions:read scope must be provided for cross-run/repo lookup; otherwise these options are ignored.
|
||||
export interface FindOptions {
|
||||
findBy?: {
|
||||
// Token with actions:read permissions
|
||||
token: string
|
||||
// WorkflowRun of the artifact(s) to lookup
|
||||
workflowRunId: number
|
||||
// Repository owner
|
||||
repositoryOwner: string
|
||||
// Repository name
|
||||
repositoryName: string
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import * as core from '@actions/core'
|
||||
import {getRuntimeToken} from './config'
|
||||
import jwt_decode from 'jwt-decode'
|
||||
|
||||
|
@ -11,7 +12,7 @@ interface ActionsToken {
|
|||
}
|
||||
|
||||
const InvalidJwtError = new Error(
|
||||
'Failed to get backend IDs: The provided JWT token is invalid'
|
||||
'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
|
||||
)
|
||||
|
||||
// uses the JWT token claims to get the
|
||||
|
@ -41,24 +42,29 @@ export function getBackendIdsFromToken(): BackendIds {
|
|||
|
||||
for (const scopes of scpParts) {
|
||||
const scopeParts = scopes.split(':')
|
||||
if (scopeParts?.[0] !== 'Actions.Results') {
|
||||
// not the Actions.Results scope
|
||||
continue
|
||||
}
|
||||
|
||||
/*
|
||||
* example scopeParts:
|
||||
* ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
|
||||
*/
|
||||
if (scopeParts.length !== 3) {
|
||||
// not the Actions.Results scope
|
||||
continue
|
||||
// missing expected number of claims
|
||||
throw InvalidJwtError
|
||||
}
|
||||
|
||||
if (scopeParts[0] !== 'Actions.Results') {
|
||||
// not the Actions.Results scope
|
||||
continue
|
||||
}
|
||||
|
||||
return {
|
||||
const ids = {
|
||||
workflowRunBackendId: scopeParts[1],
|
||||
workflowJobRunBackendId: scopeParts[2]
|
||||
}
|
||||
|
||||
core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
|
||||
core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)
|
||||
|
||||
return ids
|
||||
}
|
||||
|
||||
throw InvalidJwtError
|
||||
|
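For reference, a hedged sketch of what the parser above produces for a well-formed runtime token; the UUIDs repeat the example already quoted in the code comment and are not real values.

// Illustrative shape of the token's scp claim that this parser expects:
//
//   scp: "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774 ..."
//
const ids = getBackendIdsFromToken()
// ids.workflowRunBackendId    -> 'ce7f54c7-61c7-4aae-887f-30da475f5f1a'
// ids.workflowJobRunBackendId -> 'ca395085-040a-526b-2ce8-bdc85f692774'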
|
|
@ -2,7 +2,7 @@ import * as core from '@actions/core'
|
|||
import {UploadOptions, UploadResponse} from '../shared/interfaces'
|
||||
import {getExpiration} from './retention'
|
||||
import {validateArtifactName} from './path-and-artifact-name-validation'
|
||||
import {createArtifactTwirpClient} from '../shared/artifact-twirp-client'
|
||||
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
|
||||
import {
|
||||
UploadZipSpecification,
|
||||
getUploadZipSpecification,
|
||||
|
@ -44,21 +44,9 @@ export async function uploadArtifact(
|
|||
|
||||
// get the IDs needed for the artifact creation
|
||||
const backendIds = getBackendIdsFromToken()
|
||||
if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
|
||||
core.warning(
|
||||
`Failed to get the necessary backend ids which are required to create the artifact`
|
||||
)
|
||||
return {
|
||||
success: false
|
||||
}
|
||||
}
|
||||
core.debug(`Workflow Run Backend ID: ${backendIds.workflowRunBackendId}`)
|
||||
core.debug(
|
||||
`Workflow Job Run Backend ID: ${backendIds.workflowJobRunBackendId}`
|
||||
)
|
||||
|
||||
// create the artifact client
|
||||
const artifactClient = createArtifactTwirpClient('upload')
|
||||
const artifactClient = internalArtifactTwirpClient()
|
||||
|
||||
// create the artifact
|
||||
const createArtifactReq: CreateArtifactRequest = {
|
||||
|
|