mirror of https://github.com/actions/toolkit

Merge branch 'actions:main' into main

commit f131e9b77e
@@ -43,7 +43,7 @@ Note that before a PR will be accepted, you must ensure:

 1. In a new branch, create a new Lerna package:

    ```console
-   $ npm run create-package new-package
+   $ npm run new-package [name]
    ```

    This will ask you some questions about the new package. Start with `0.0.0` as the first version (look generally at some of the other packages for how the package.json is structured).
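For context, an invocation of the renamed script might look like this — a minimal sketch; the branch and package names are illustrative, not from the diff:

```bash
# Create a branch and scaffold a new Lerna package (names are illustrative)
git checkout -b add-my-package
npm run new-package my-package
```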
@@ -1,5 +1,3 @@
-# Temporarily disabled while v2.0.0 of @actions/artifact is under development
 name: artifact-unit-tests
 on:
   push:
@@ -12,8 +10,8 @@ on:
       - '**.md'

 jobs:
-  build:
-    name: Build
+  upload:
+    name: Upload

     strategy:
       matrix:
@@ -24,10 +22,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -42,51 +40,47 @@ jobs:
           npm run tsc
         working-directory: packages/artifact

-      - name: Set artifact file contents
-        shell: bash
-        run: |
-          echo "file1=hello from file 1" >> $GITHUB_ENV
-          echo "file2=hello from file 2" >> $GITHUB_ENV
-
       - name: Create files that will be uploaded
         run: |
           mkdir artifact-path
-          echo '${{ env.file1 }}' > artifact-path/first.txt
-          echo '${{ env.file2 }}' > artifact-path/second.txt
+          echo -n 'hello from file 1' > artifact-path/first.txt
+          echo -n 'hello from file 2' > artifact-path/second.txt

-      - name: Upload Artifacts using actions/github-script@v6
-        uses: actions/github-script@v6
+      - name: Upload Artifacts
+        uses: actions/github-script@v7
         with:
           script: |
-            const artifact = require('./packages/artifact/lib/artifact')
+            const {default: artifact} = require('./packages/artifact/lib/artifact')

             const artifactName = 'my-artifact-${{ matrix.runs-on }}'
             console.log('artifactName: ' + artifactName)

             const fileContents = ['artifact-path/first.txt','artifact-path/second.txt']

-            const uploadResult = await artifact.create().uploadArtifact(artifactName, fileContents, './')
+            const uploadResult = await artifact.uploadArtifact(artifactName, fileContents, './')
             console.log(uploadResult)

-            const success = uploadResult.success
             const size = uploadResult.size
             const id = uploadResult.id

-            if (!success) {
-              throw new Error('Failed to upload artifact')
-            } else {
-              console.log(`Successfully uploaded artifact ${id}`)
-            }
+            console.log(`Successfully uploaded artifact ${id}`)
+
+            try {
+              await artifact.uploadArtifact(artifactName, fileContents, './')
+              throw new Error('should have failed second upload')
+            } catch (err) {
+              console.log('Successfully blocked second artifact upload')
+            }
   verify:
     name: Verify and Delete
     runs-on: ubuntu-latest
-    needs: [build]
+    needs: [upload]
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -101,35 +95,100 @@ jobs:
           npm run tsc
         working-directory: packages/artifact

-      - name: List artifacts using actions/github-script@v6
-        uses: actions/github-script@v6
+      - name: List and Download Artifacts
+        uses: actions/github-script@v7
         with:
           script: |
-            const artifact = require('./packages/artifact/lib/artifact')
+            const {default: artifactClient} = require('./packages/artifact/lib/artifact')

-            const workflowRunId = process.env.GITHUB_RUN_ID
-            const repository = process.env.GITHUB_REPOSITORY
-            const repositoryOwner = repository.split('/')[0]
-            const repositoryName = repository.split('/')[1]
+            const {readFile} = require('fs/promises')
+            const path = require('path')

-            const listResult = await artifact.create().listArtifacts(workflowRunId, repositoryOwner, repositoryName, '${{ secrets.GITHUB_TOKEN }}')
+            const findBy = {
+              repositoryOwner: process.env.GITHUB_REPOSITORY.split('/')[0],
+              repositoryName: process.env.GITHUB_REPOSITORY.split('/')[1],
+              token: '${{ secrets.GITHUB_TOKEN }}',
+              workflowRunId: process.env.GITHUB_RUN_ID
+            }
+
+            const listResult = await artifactClient.listArtifacts({latest: true, findBy})
             console.log(listResult)

             const artifacts = listResult.artifacts
+            const expected = [
+              'my-artifact-ubuntu-latest',
+              'my-artifact-windows-latest',
+              'my-artifact-macos-latest'
+            ]

-            if (artifacts.length !== 3) {
-              throw new Error('Expected 3 artifacts but only found ' + artifacts.length + ' artifacts')
-            }
+            const foundArtifacts = artifacts.filter(artifact =>
+              expected.includes(artifact.name)
+            )

-            const artifactNames = artifacts.map(artifact => artifact.name)
-            if (!artifactNames.includes('my-artifact-ubuntu-latest')){
-              throw new Error("Expected artifact list to contain an artifact named my-artifact-ubuntu-latest but it's missing")
-            }
-            if (!artifactNames.includes('my-artifact-windows-latest')){
-              throw new Error("Expected artifact list to contain an artifact named my-artifact-windows-latest but it's missing")
-            }
-            if (!artifactNames.includes('my-artifact-macos-latest')){
-              throw new Error("Expected artifact list to contain an artifact named my-artifact-macos-latest but it's missing")
+            if (foundArtifacts.length !== 3) {
+              console.log('Unexpected length of found artifacts', foundArtifacts)
+              throw new Error(
+                `Expected 3 artifacts but found ${foundArtifacts.length} artifacts.`
+              )
             }

+            console.log('Successfully listed artifacts that were uploaded')
+
+            const files = [
+              {name: 'artifact-path/first.txt', content: 'hello from file 1'},
+              {name: 'artifact-path/second.txt', content: 'hello from file 2'}
+            ]
+
+            for (const artifact of foundArtifacts) {
+              const {downloadPath} = await artifactClient.downloadArtifact(artifact.id, {
+                path: artifact.name,
+                findBy
+              })
+
+              console.log('Downloaded artifact to:', downloadPath)
+
+              for (const file of files) {
+                const filepath = path.join(
+                  process.env.GITHUB_WORKSPACE,
+                  downloadPath,
+                  file.name
+                )
+
+                console.log('Checking file:', filepath)
+
+                const content = await readFile(filepath, 'utf8')
+                if (content.trim() !== file.content.trim()) {
+                  throw new Error(
+                    `Expected file '${file.name}' to contain '${file.content}' but found '${content}'`
+                  )
+                }
+              }
+            }
+      - name: Delete Artifacts
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const {default: artifactClient} = require('./packages/artifact/lib/artifact')
+
+            const artifactsToDelete = [
+              'my-artifact-ubuntu-latest',
+              'my-artifact-windows-latest',
+              'my-artifact-macos-latest'
+            ]
+
+            for (const artifactName of artifactsToDelete) {
+              const {id} = await artifactClient.deleteArtifact(artifactName)
+            }
+
+            const {artifacts} = await artifactClient.listArtifacts({latest: true})
+            const foundArtifacts = artifacts.filter(artifact =>
+              artifactsToDelete.includes(artifact.name)
+            )
+
+            if (foundArtifacts.length !== 0) {
+              console.log('Unexpected length of found artifacts:', foundArtifacts)
+              throw new Error(
+                `Expected 0 artifacts but found ${foundArtifacts.length} artifacts.`
+              )
+            }
@@ -18,10 +18,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -22,10 +22,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -20,7 +20,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
@@ -5,7 +5,7 @@ on:
   inputs:
     package:
       required: true
-      description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache'
+      description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache, attest'

 jobs:
   test:
@@ -13,13 +13,13 @@ jobs:

     steps:
       - name: setup repo
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: verify package exists
         run: ls packages/${{ github.event.inputs.package }}

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -40,7 +40,7 @@ jobs:
         working-directory: packages/${{ github.event.inputs.package }}

       - name: upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}
           path: packages/${{ github.event.inputs.package }}/*.tgz
@@ -49,10 +49,13 @@ jobs:
     runs-on: macos-latest
     needs: test
     environment: npm-publish
+    permissions:
+      contents: read
+      id-token: write
     steps:

       - name: download artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}

@@ -62,7 +65,7 @@ jobs:
           NPM_TOKEN: ${{ secrets.TOKEN }}

       - name: publish
-        run: npm publish *.tgz
+        run: npm publish --provenance *.tgz

       - name: notify slack on failure
         if: failure()
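A note connecting the two changes above: `npm publish --provenance` asks npm (9.5 or later) to generate a provenance attestation using the workflow's OIDC token, which is exactly what the added `id-token: write` permission grants; `contents: read` keeps the rest of the job least-privileged. A minimal sketch of the resulting step:

```bash
# Sketch: what the publish step effectively runs.
# Requires npm >= 9.5 and an Actions job granted `id-token: write`
# (the --provenance flag fails outside an OIDC-enabled CI environment).
npm publish --provenance *.tgz
```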
@@ -23,10 +23,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -9,7 +9,7 @@ jobs:
     if: ${{ github.repository_owner == 'actions' }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Update Octokit
         working-directory: packages/github
         run: |
@@ -2,3 +2,4 @@

 /packages/artifact/ @actions/artifacts-actions
 /packages/cache/ @actions/actions-cache
+/packages/attest/ @actions/package-security
@@ -102,6 +102,15 @@ $ npm install @actions/cache
 ```
 <br/>

+:lock_with_ink_pen: [@actions/attest](packages/attest)
+
+Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)
+
+```bash
+$ npm install @actions/attest
+```
+<br/>
+
 ## Creating an Action with the Toolkit

 :question: [Choosing an action type](docs/action-types.md)
@@ -32,7 +32,7 @@ jobs:
     os: [ubuntu-16.04, windows-2019]
   runs-on: ${{matrix.os}}
   actions:
-  - uses: actions/setup-node@v3
+  - uses: actions/setup-node@v4
     with:
       version: ${{matrix.node}}
   - run: |
@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:

 ```yaml
 steps:
-  using: actions/setup-node@v3
+  using: actions/setup-node@v4
 ```

 # Define Metadata
File diff suppressed because it is too large
@@ -13,7 +13,7 @@
     "lint": "eslint packages/**/*.ts",
     "lint-fix": "eslint packages/**/*.ts --fix",
     "new-package": "scripts/create-package",
-    "test": "jest --testTimeout 10000"
+    "test": "jest --testTimeout 70000"
   },
   "devDependencies": {
     "@types/jest": "^29.5.4",

@@ -27,7 +27,7 @@
     "eslint-plugin-prettier": "^5.0.0",
     "flow-bin": "^0.115.0",
     "jest": "^29.6.4",
-    "lerna": "^7.1.4",
+    "lerna": "^6.4.1",
     "nx": "16.6.0",
     "prettier": "^3.0.0",
     "ts-jest": "^29.1.1",
@@ -1,30 +1,43 @@
 # Contributions

-This package is used internally by the v2+ versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions so it is important that major changes or updates get properly tested.
+This package is used internally by the v4 versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions, so it is important that major changes or updates get properly tested.

 Any issues or feature requests that are related to the artifact actions should be filed in the appropriate repo.

-If making large changes, there are a few scenarios that should be tested.
+A limited range of unit tests run as part of each PR when making changes to the artifact packages. For small contributions and fixes, they should be sufficient.
+
+If making large changes, there are a few scenarios that should be tested:

-- Uploading very large artifacts (large artifacts get compressed using gzip so compression/decompression must be tested)
-- Uploading artifacts with lots of small files (each file is uploaded with its own HTTP call, timeouts and non-success HTTP responses can be expected so they must be properly handled)
+- Uploading very large artifacts
+- Uploading artifacts with lots of small files
 - Uploading artifacts using a self-hosted runner (uploads and downloads behave differently due to extra latency)
 - Downloading a single artifact (large and small; if lots of small files are part of an artifact, timeouts and non-success HTTP responses can be expected)
 - Downloading all artifacts at once

 Large architectural changes can impact upload/download performance, so it is important to separately run extra tests. We request that any large contributions/changes have extra detailed testing so we can verify performance and possible regressions.

-It is not possible to run end-to-end tests for artifacts as part of a PR in this repo because certain env variables such as `ACTIONS_RUNTIME_URL` are only available from the context of an action as opposed to a shell script. These env variables are needed in order to make the necessary API calls.
+Tests will run for every push/pull_request [via Actions](https://github.com/actions/toolkit/blob/main/.github/workflows/artifact-tests.yml).

 # Testing

-An easy way to test changes is to fork the artifact actions and to use `npm link` to test your changes.
+## Package tests

-1. Fork the [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact) repos
-2. Clone the forks locally
-3. With your local changes to the toolkit repo, type `npm link` after ensuring there are no errors when running `tsc`
-4. In the locally cloned fork, type `npm link @actions/artifact`
-5. Create a new release for your local fork using `tsc` and `npm run release` (this will create a new `dist/index.js` file using `@vercel/ncc`)
-6. Commit and push your local changes, you will then be able to test your changes with your forked action
+To run unit tests for the `@actions/artifact` package (a condensed shell version follows these steps):

+1. Clone `actions/toolkit` locally
+2. Install dependencies: `npm run bootstrap`
+3. Change working directory to `packages/artifact`
+4. Run jest tests: `npm run test`
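The same flow condensed into shell commands — a sketch only, assuming a POSIX shell and that the workspace bootstraps cleanly:

```bash
# Run the @actions/artifact unit tests locally (sketch of the steps above)
git clone https://github.com/actions/toolkit
cd toolkit
npm run bootstrap        # install and link workspace dependencies
cd packages/artifact
npm run test             # runs the jest suite for this package
```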
+## Within upload-artifact or download-artifact actions
+
+An easy way to test changes for the official upload/download actions is to fork them, compile the changes, and run them.
+
+1. For your local `actions/toolkit` changes:
+   1. Change directory to `packages/artifact`
+   2. Compile the changes: `npm run tsc`
+   3. Symlink your package change: `npm link`
+2. Fork and clone either [upload-artifact](https://github.com/actions/upload-artifact) or [download-artifact](https://github.com/actions/download-artifact)
+   1. In the locally cloned fork, link to your local toolkit changes: `npm link @actions/artifact`
+   2. Then compile your changes with `npm run release`. The local `dist/index.js` should be updated with your changes.
+   3. Commit and push to your fork; you can then test with a `uses:` in your workflow pointed at your fork. (A shell sketch of this flow follows.)
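A compact sketch of that linking flow — the paths and fork URL are illustrative, and only the commands named above are used:

```bash
# In your local toolkit clone: build and expose the package
cd toolkit/packages/artifact
npm run tsc                  # compile your local changes
npm link                     # make this package linkable system-wide

# In your fork of upload-artifact (URL is illustrative)
git clone https://github.com/<your-user>/upload-artifact
cd upload-artifact
npm link @actions/artifact   # use the locally built toolkit package
npm run release              # rebuild dist/index.js via @vercel/ncc
git commit -am "Test local @actions/artifact" && git push
```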
@@ -1,13 +1,192 @@
 # `@actions/artifact`

-## Usage
+Interact programmatically with [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts).

-You can use this package to interact with the Actions artifacts.
+This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact) and [`@actions/download-artifact`](https://github.com/actions/download-artifact) actions.

-This most recently published version of this package (`1.1.1`) can be found [here](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.1/packages/artifact)
-
-## 🚧 Under construction 🚧
+- [`@actions/artifact`](#actionsartifact)
+  - [v2 - What's New](#v2---whats-new)
+    - [Improvements](#improvements)
+    - [Breaking changes](#breaking-changes)
+  - [Quick Start](#quick-start)
+  - [Examples](#examples)
+    - [Upload and Download](#upload-and-download)
+    - [Delete an Artifact](#delete-an-artifact)
+    - [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
+    - [Speeding up large uploads](#speeding-up-large-uploads)
+  - [Additional Resources](#additional-resources)

-This package is currently undergoing a major overhaul in preparation for `v4` versions of `upload-artifact` and `download-artifact` (these Actions will use a new `2.0.0` version of `@actions/artifact` that will soon be released). The upcoming version of `@actions/artifact` will take advantage of a major re-architecture with entirely new APIs.
+## v2 - What's New

-The upcoming `2.0.0` package and `v4` artifact Actions aim to solve some of the major pain-points that have made artifact usage difficult up until now.
+> [!IMPORTANT]
+> @actions/artifact v2+, upload-artifact@v4+, and download-artifact@v4+ are not yet supported on GHES. The previous version of this package can be found at [this tag](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.2/packages/artifact) and [on npm](https://www.npmjs.com/package/@actions/artifact/v/1.1.2).

+The release of `@actions/artifact@v2` (including `upload-artifact@v4` and `download-artifact@v4`) is a major change to the backend architecture of Artifacts, bringing numerous performance and behavioral improvements.
+### Improvements
+
+1. All upload and download operations are much quicker, with up to 80% faster download times and 96% faster upload times in worst-case scenarios.
+2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
+3. Artifacts can now be downloaded and deleted from the UI _before_ the entire workflow run finishes.
+4. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs. Both of these factors help reduce the possibility of accidentally corrupting Artifact files. (Digest/integrity hash coming soon in the API!)
+5. This library (and `actions/download-artifact`) now supports downloading Artifacts from _other_ repositories and runs if a `GITHUB_TOKEN` with sufficient `actions:read` permissions is provided.
+
+### Breaking changes
+
+1. Firewall rules required for self-hosted runners.
+
+   If you are using self-hosted runners behind a firewall, you must have flows open to [Actions endpoints](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github). If you cannot use wildcard rules for your firewall, see the GitHub [meta endpoint](https://api.github.com/meta) for specific endpoints.
+
+   e.g.
+
+   ```bash
+   curl https://api.github.com/meta | jq .domains.actions
+   ```
+
+2. Uploading to the same named Artifact multiple times.
+
+   Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once.
+
+3. Limit of Artifacts for an individual job.
+
+   Each job in a workflow run now has a limit of 10 artifacts.
+## Quick Start
+
+Install the package:
+
+```bash
+npm i @actions/artifact
+```
+
+Import the module:
+
+```js
+// ES6 module
+import {DefaultArtifactClient} from '@actions/artifact'
+
+// CommonJS
+const {DefaultArtifactClient} = require('@actions/artifact')
+```
+
+Then instantiate:
+
+```js
+const artifact = new DefaultArtifactClient()
+```
+
+ℹ️ For a comprehensive list of classes, interfaces, functions and more, see the [generated documentation](./docs/generated/README.md).
+
+## Examples
+
+### Upload and Download
+
+The most basic scenario is uploading one or more files to an Artifact, then downloading that Artifact. Downloads are based on the Artifact ID, which can be obtained in the response of `uploadArtifact`, `getArtifact`, `listArtifacts` or via the [REST API](https://docs.github.com/en/rest/actions/artifacts).
+
+```js
+const {id, size} = await artifact.uploadArtifact(
+  // name of the artifact
+  'my-artifact',
+  // files to include (supports absolute and relative paths)
+  ['/absolute/path/file1.txt', './relative/file2.txt'],
+  {
+    // optional: how long to retain the artifact
+    // if unspecified, defaults to repository/org retention settings (the limit of this value)
+    retentionDays: 10
+  }
+)
+
+console.log(`Created artifact with id: ${id} (bytes: ${size})`)
+
+const {downloadPath} = await artifact.downloadArtifact(id, {
+  // optional: download destination path. otherwise defaults to $GITHUB_WORKSPACE
+  path: '/tmp/dst/path'
+})
+
+console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
+```
+### Delete an Artifact
+
+To delete an artifact, all you need is the name.
+
+```js
+const {id} = await artifact.deleteArtifact(
+  // name of the artifact
+  'my-artifact'
+)
+
+console.log(`Deleted Artifact ID '${id}'`)
+```
+
+It also supports deleting from other repos/runs, provided a GitHub token with `actions:write` permission on the target repository is supplied.
+
+```js
+const findBy = {
+  // must have actions:write permission on target repository
+  token: process.env['GITHUB_TOKEN'],
+  workflowRunId: 123,
+  repositoryOwner: 'actions',
+  repositoryName: 'toolkit'
+}
+
+const {id} = await artifact.deleteArtifact(
+  // name of the artifact
+  'my-artifact',
+  // options to find by other repo/owner
+  {findBy}
+)
+
+console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/${findBy.repositoryName}`)
+```
+
+### Downloading from other workflow runs or repos
+
+It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must supply `options.findBy` to `downloadArtifact`.
+
+```ts
+const findBy = {
+  // must have actions:read permission on target repository
+  token: process.env['GITHUB_TOKEN'],
+  workflowRunId: 123,
+  repositoryOwner: 'actions',
+  repositoryName: 'toolkit'
+}
+
+await artifact.downloadArtifact(1337, {
+  findBy
+})
+
+// can also be used in other methods
+
+await artifact.getArtifact('my-artifact', {
+  findBy
+})
+
+await artifact.listArtifacts({
+  findBy
+})
+```
+
+### Speeding up large uploads
+
+If you have large files that need to be uploaded (or file types that don't compress well), you may benefit from changing the compression level of the Artifact archive. NOTE: This is a tradeoff between artifact upload time and stored data size.
+
+```ts
+await artifact.uploadArtifact('my-massive-artifact', ['big_file.bin'], {
+  // The level of compression for Zlib to be applied to the artifact archive.
+  // - 0: No compression
+  // - 1: Best speed
+  // - 6: Default compression (same as GNU Gzip)
+  // - 9: Best compression
+  compressionLevel: 0
+})
+```
+
+## Additional Resources
+
+- [Releases](./RELEASES.md)
+- [Contribution Guide](./CONTRIBUTIONS.md)
+- [Frequently Asked Questions](./docs/faq.md)
@@ -1,66 +1,72 @@
 # @actions/artifact Releases

-### 0.1.0
+### 2.1.7

-- Initial release
+- Update unzip-stream dependency and reverted to using `unzip.Extract()`

-### 0.2.0
+### 2.1.6

-- Fixes to TCP connections not closing
-- GZip file compression to speed up downloads
-- Improved logging and output
-- Extra documentation
+- Will retry on invalid request responses.

-### 0.3.0
+### 2.1.5

-- Fixes to gzip decompression when downloading artifacts
-- Support handling 429 response codes
-- Improved download experience when dealing with empty files
-- Exponential backoff when retryable status codes are encountered
-- Clearer error message if storage quota has been reached
-- Improved logging and output during artifact download
+- Bumped `archiver` dependency to 7.0.1

-### 0.3.1
+### 2.1.4

-- Fix to ensure temporary gzip files get correctly deleted during artifact upload
-- Remove spaces as a forbidden character during upload
+- Adds info-level logging for zip extraction

-### 0.3.2
+### 2.1.3

-- Fix to ensure readstreams get correctly reset in the event of a retry
+- Fixes a bug in the extract logic updated in 2.1.2

-### 0.3.3
+### 2.1.2

-- Increase chunk size during upload from 4MB to 8MB
-- Improve user-agent strings during API calls to help internally diagnose issues
+- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts

-### 0.3.5
+### 2.1.1

-- Retry in the event of a 413 response
+- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts

-### 0.4.0
+### 2.1.0

-- Add option to specify custom retentions on artifacts
+- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
+- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)

-### 0.4.1
+### 2.0.1

-- Update to latest @actions/core version
+- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249

-### 0.4.2
+### 2.0.0

-- Improved retry-ability when a partial artifact download is encountered
+- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
+- Numerous API changes, [some breaking](./README.md#breaking-changes).
+- [Blog post with more info](https://github.blog/2024-02-12-get-started-with-v4-of-github-actions-artifacts/)

-### 0.5.0
+### 1.1.1

-- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
+- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)

-### 0.5.1
+### 1.1.0

-- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
+- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)

-### 0.5.2
+### 1.0.2

-- Add HTTP 500 as a retryable status code for artifact upload and download.
+- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

+### 1.0.1
+
+- Update to v2.0.0 of `@actions/http-client`
+
+### 1.0.0
+
+- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
+
+### 0.6.1
+
+- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
+
 ### 0.6.0
@@ -71,26 +77,64 @@
 - Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
 - More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)

-### 0.6.1
+### 0.5.2

-- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
+- Add HTTP 500 as a retryable status code for artifact upload and download.

-### 1.0.0
+### 0.5.1

-- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
+- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download

-### 1.0.1
+### 0.5.0

-- Update to v2.0.0 of `@actions/http-client`
+- Improved retry-ability for all http calls during artifact upload and download if an error is encountered

-### 1.0.2
+### 0.4.2

-- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
+- Improved retry-ability when a partial artifact download is encountered

-### 1.1.0
+### 0.4.1

-- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
+- Update to latest @actions/core version

-### 1.1.1
+### 0.4.0

-- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
+- Add option to specify custom retentions on artifacts

+### 0.3.5
+
+- Retry in the event of a 413 response
+
+### 0.3.3
+
+- Increase chunk size during upload from 4MB to 8MB
+- Improve user-agent strings during API calls to help internally diagnose issues
+
+### 0.3.2
+
+- Fix to ensure readstreams get correctly reset in the event of a retry
+
+### 0.3.1
+
+- Fix to ensure temporary gzip files get correctly deleted during artifact upload
+- Remove spaces as a forbidden character during upload
+
+### 0.3.0
+
+- Fixes to gzip decompression when downloading artifacts
+- Support handling 429 response codes
+- Improved download experience when dealing with empty files
+- Exponential backoff when retryable status codes are encountered
+- Clearer error message if storage quota has been reached
+- Improved logging and output during artifact download
+
+### 0.2.0
+
+- Fixes to TCP connections not closing
+- GZip file compression to speed up downloads
+- Improved logging and output
+- Extra documentation
+
+### 0.1.0
+
+- Initial release
@@ -2,18 +2,21 @@ import * as http from 'http'
 import * as net from 'net'
 import {HttpClient} from '@actions/http-client'
 import * as config from '../src/internal/shared/config'
-import {createArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
-import * as core from '@actions/core'
+import {internalArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
+import {noopLogs} from './common'
+import {NetworkError, UsageError} from '../src/internal/shared/errors'

 jest.mock('@actions/http-client')

+const clientOptions = {
+  maxAttempts: 5,
+  retryIntervalMs: 1,
+  retryMultiplier: 1.5
+}
+
 describe('artifact-http-client', () => {
   beforeAll(() => {
-    // mock all output so that there is less noise when running tests
-    jest.spyOn(console, 'log').mockImplementation(() => {})
-    jest.spyOn(core, 'debug').mockImplementation(() => {})
-    jest.spyOn(core, 'info').mockImplementation(() => {})
-    jest.spyOn(core, 'warning').mockImplementation(() => {})
+    noopLogs()
     jest
       .spyOn(config, 'getResultsServiceUrl')
       .mockReturnValue('http://localhost:8080')
@@ -25,7 +28,7 @@ describe('artifact-http-client', () => {
   })

   it('should successfully create a client', () => {
-    const client = createArtifactTwirpClient('upload')
+    const client = internalArtifactTwirpClient()
     expect(client).toBeDefined()
   })

@@ -50,7 +53,7 @@ describe('artifact-http-client', () => {
      }
    })

-    const client = createArtifactTwirpClient('upload')
+    const client = internalArtifactTwirpClient()
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
@@ -98,12 +101,55 @@ describe('artifact-http-client', () => {
      }
    })

-    const client = createArtifactTwirpClient(
-      'upload',
-      5, // retry 5 times
-      1, // wait 1 ms
-      1.5 // backoff factor
-    )
+    const client = internalArtifactTwirpClient(clientOptions)
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
      name: 'artifact',
      version: 4
    })

    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(artifact).toBeDefined()
    expect(artifact.ok).toBe(true)
    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
    expect(mockPost).toHaveBeenCalledTimes(2)
  })

+  it('should retry if invalid body response', async () => {
+    const mockPost = jest
+      .fn(() => {
+        const msgSucceeded = new http.IncomingMessage(new net.Socket())
+        msgSucceeded.statusCode = 200
+        return {
+          message: msgSucceeded,
+          readBody: async () => {
+            return Promise.resolve(
+              `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
+            )
+          }
+        }
+      })
+      .mockImplementationOnce(() => {
+        const msgFailed = new http.IncomingMessage(new net.Socket())
+        msgFailed.statusCode = 502
+        msgFailed.statusMessage = 'Bad Gateway'
+        return {
+          message: msgFailed,
+          readBody: async () => {
+            return Promise.resolve('💥')
+          }
+        }
+      })
+    const mockHttpClient = (
+      HttpClient as unknown as jest.Mock
+    ).mockImplementation(() => {
+      return {
+        post: mockPost
+      }
+    })
+
+    const client = internalArtifactTwirpClient(clientOptions)
+    const artifact = await client.CreateArtifact({
+      workflowRunBackendId: '1234',
+      workflowJobRunBackendId: '5678',
@@ -138,12 +184,7 @@ describe('artifact-http-client', () => {
        post: mockPost
      }
    })
-    const client = createArtifactTwirpClient(
-      'upload',
-      5, // retry 5 times
-      1, // wait 1 ms
-      1.5 // backoff factor
-    )
+    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
@@ -178,12 +219,7 @@ describe('artifact-http-client', () => {
        post: mockPost
      }
    })
-    const client = createArtifactTwirpClient(
-      'upload',
-      5, // retry 5 times
-      1, // wait 1 ms
-      1.5 // backoff factor
-    )
+    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
@@ -197,4 +233,116 @@ describe('artifact-http-client', () => {
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

+  it('should fail with a descriptive error', async () => {
+    // 409 duplicate error
+    const mockPost = jest.fn(() => {
+      const msgFailed = new http.IncomingMessage(new net.Socket())
+      msgFailed.statusCode = 409
+      msgFailed.statusMessage = 'Conflict'
+      return {
+        message: msgFailed,
+        readBody: async () => {
+          return Promise.resolve(
+            `{"msg": "an artifact with this name already exists on the workflow run"}`
+          )
+        }
+      }
+    })
+
+    const mockHttpClient = (
+      HttpClient as unknown as jest.Mock
+    ).mockImplementation(() => {
+      return {
+        post: mockPost
+      }
+    })
+    const client = internalArtifactTwirpClient(clientOptions)
+    await expect(async () => {
+      await client.CreateArtifact({
+        workflowRunBackendId: '1234',
+        workflowJobRunBackendId: '5678',
+        name: 'artifact',
+        version: 4
+      })
+      await client.CreateArtifact({
+        workflowRunBackendId: '1234',
+        workflowJobRunBackendId: '5678',
+        name: 'artifact',
+        version: 4
+      })
+    }).rejects.toThrowError(
+      'Failed to CreateArtifact: Received non-retryable error: Failed request: (409) Conflict: an artifact with this name already exists on the workflow run'
+    )
+    expect(mockHttpClient).toHaveBeenCalledTimes(1)
+    expect(mockPost).toHaveBeenCalledTimes(1)
+  })
+
+  it('should properly describe a network failure', async () => {
+    class FakeNodeError extends Error {
+      code: string
+      constructor(code: string) {
+        super()
+        this.code = code
+      }
+    }
+
+    const mockPost = jest.fn(() => {
+      throw new FakeNodeError('ENOTFOUND')
+    })
+
+    const mockHttpClient = (
+      HttpClient as unknown as jest.Mock
+    ).mockImplementation(() => {
+      return {
+        post: mockPost
+      }
+    })
+    const client = internalArtifactTwirpClient()
+    await expect(async () => {
+      await client.CreateArtifact({
+        workflowRunBackendId: '1234',
+        workflowJobRunBackendId: '5678',
+        name: 'artifact',
+        version: 4
+      })
+    }).rejects.toThrowError(new NetworkError('ENOTFOUND').message)
+    expect(mockHttpClient).toHaveBeenCalledTimes(1)
+    expect(mockPost).toHaveBeenCalledTimes(1)
+  })
+
+  it('should properly describe a usage error', async () => {
+    const mockPost = jest.fn(() => {
+      const msgFailed = new http.IncomingMessage(new net.Socket())
+      msgFailed.statusCode = 403
+      msgFailed.statusMessage = 'Forbidden'
+      return {
+        message: msgFailed,
+        readBody: async () => {
+          return Promise.resolve(
+            `{"msg": "insufficient usage to create artifact"}`
+          )
+        }
+      }
+    })
+
+    const mockHttpClient = (
+      HttpClient as unknown as jest.Mock
+    ).mockImplementation(() => {
+      return {
+        post: mockPost
+      }
+    })
+    const client = internalArtifactTwirpClient()
+    await expect(async () => {
+      await client.CreateArtifact({
+        workflowRunBackendId: '1234',
+        workflowJobRunBackendId: '5678',
+        name: 'artifact',
+        version: 4
+      })
+    }).rejects.toThrowError(new UsageError().message)
+    expect(mockHttpClient).toHaveBeenCalledTimes(1)
+    expect(mockPost).toHaveBeenCalledTimes(1)
+  })
 })
@@ -0,0 +1,9 @@
+import * as core from '@actions/core'
+
+// noopLogs mocks the console.log and core.* functions to prevent output in the console while testing
+export const noopLogs = (): void => {
+  jest.spyOn(console, 'log').mockImplementation(() => {})
+  jest.spyOn(core, 'debug').mockImplementation(() => {})
+  jest.spyOn(core, 'info').mockImplementation(() => {})
+  jest.spyOn(core, 'warning').mockImplementation(() => {})
+}
@@ -0,0 +1,27 @@
+import * as config from '../src/internal/shared/config'
+
+beforeEach(() => {
+  jest.resetModules()
+})
+
+describe('isGhes', () => {
+  it('should return false when the request domain is github.com', () => {
+    process.env.GITHUB_SERVER_URL = 'https://github.com'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with ghe.com', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.com'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with ghe.localhost', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return true when the request domain is specific to an enterprise', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
+    expect(config.isGhes()).toBe(true)
+  })
+})
@@ -0,0 +1,192 @@
+import * as github from '@actions/github'
+import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
+import type {RequestInterface} from '@octokit/types'
+import {
+  deleteArtifactInternal,
+  deleteArtifactPublic
+} from '../src/internal/delete/delete-artifact'
+import * as config from '../src/internal/shared/config'
+import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
+import * as util from '../src/internal/shared/util'
+import {noopLogs} from './common'
+
+type MockedRequest = jest.MockedFunction<RequestInterface<object>>
+
+type MockedDeleteArtifact = jest.MockedFunction<
+  RestEndpointMethods['actions']['deleteArtifact']
+>
+
+jest.mock('@actions/github', () => ({
+  getOctokit: jest.fn().mockReturnValue({
+    request: jest.fn(),
+    rest: {
+      actions: {
+        deleteArtifact: jest.fn()
+      }
+    }
+  })
+}))
+
+const fixtures = {
+  repo: 'toolkit',
+  owner: 'actions',
+  token: 'ghp_1234567890',
+  runId: 123,
+  backendIds: {
+    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
+    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
+  },
+  artifacts: [
+    {
+      id: 1,
+      name: 'my-artifact',
+      size: 456,
+      createdAt: new Date('2023-12-01')
+    },
+    {
+      id: 2,
+      name: 'my-artifact',
+      size: 456,
+      createdAt: new Date('2023-12-02')
+    }
+  ]
+}
+
+describe('delete-artifact', () => {
+  beforeAll(() => {
+    noopLogs()
+  })
+
+  describe('public', () => {
+    it('should delete an artifact', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: [
+            {
+              name: fixtures.artifacts[0].name,
+              id: fixtures.artifacts[0].id,
+              size_in_bytes: fixtures.artifacts[0].size,
+              created_at: fixtures.artifacts[0].createdAt.toISOString()
+            }
+          ]
+        }
+      })
+
+      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
+        .deleteArtifact as MockedDeleteArtifact
+      mockDeleteArtifact.mockResolvedValueOnce({
+        status: 204,
+        headers: {},
+        url: '',
+        data: null as never
+      })
+
+      const response = await deleteArtifactPublic(
+        fixtures.artifacts[0].name,
+        fixtures.runId,
+        fixtures.owner,
+        fixtures.repo,
+        fixtures.token
+      )
+
+      expect(response).toEqual({
+        id: fixtures.artifacts[0].id
+      })
+    })
+
+    it('should fail if non-200 response', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: [
+            {
+              name: fixtures.artifacts[0].name,
+              id: fixtures.artifacts[0].id,
+              size_in_bytes: fixtures.artifacts[0].size,
+              created_at: fixtures.artifacts[0].createdAt.toISOString()
+            }
+          ]
+        }
+      })
+
+      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
+        .deleteArtifact as MockedDeleteArtifact
+      mockDeleteArtifact.mockRejectedValue(new Error('boom'))
+
+      await expect(
+        deleteArtifactPublic(
+          fixtures.artifacts[0].name,
+          fixtures.runId,
+          fixtures.owner,
+          fixtures.repo,
+          fixtures.token
+        )
+      ).rejects.toThrow('boom')
+    })
+  })
+
+  describe('internal', () => {
+    beforeEach(() => {
+      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
+      jest
+        .spyOn(util, 'getBackendIdsFromToken')
+        .mockReturnValue(fixtures.backendIds)
+      jest
+        .spyOn(config, 'getResultsServiceUrl')
+        .mockReturnValue('https://results.local')
+    })
+
+    it('should delete an artifact', async () => {
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
+        .mockResolvedValue({
+          artifacts: fixtures.artifacts.map(artifact => ({
+            ...fixtures.backendIds,
+            databaseId: artifact.id.toString(),
+            name: artifact.name,
+            size: artifact.size.toString(),
+            createdAt: Timestamp.fromDate(artifact.createdAt)
+          }))
+        })
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
+        .mockResolvedValue({
+          ok: true,
+          artifactId: fixtures.artifacts[0].id.toString()
+        })
+      const response = await deleteArtifactInternal(fixtures.artifacts[0].name)
+      expect(response).toEqual({
+        id: fixtures.artifacts[0].id
+      })
+    })
+
+    it('should fail if non-200 response', async () => {
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
+        .mockResolvedValue({
+          artifacts: fixtures.artifacts.map(artifact => ({
+            ...fixtures.backendIds,
+            databaseId: artifact.id.toString(),
+            name: artifact.name,
+            size: artifact.size.toString(),
+            createdAt: Timestamp.fromDate(artifact.createdAt)
+          }))
+        })
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
+        .mockRejectedValue(new Error('boom'))
+      await expect(
+        deleteArtifactInternal(fixtures.artifacts[0].id)
+      ).rejects.toThrow('boom')
+    })
+  })
+})
@@ -2,14 +2,21 @@ import fs from 'fs'
 import * as http from 'http'
 import * as net from 'net'
 import * as path from 'path'
-import * as core from '@actions/core'
 import * as github from '@actions/github'
 import {HttpClient} from '@actions/http-client'
 import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
 import archiver from 'archiver'

-import {downloadArtifact} from '../src/internal/download/download-artifact'
+import {
+  downloadArtifactInternal,
+  downloadArtifactPublic,
+  streamExtractExternal
+} from '../src/internal/download/download-artifact'
 import {getUserAgentString} from '../src/internal/shared/user-agent'
+import {noopLogs} from './common'
+import * as config from '../src/internal/shared/config'
+import {ArtifactServiceClientJSON} from '../src/generated'
+import * as util from '../src/internal/shared/util'

 type MockedDownloadArtifact = jest.MockedFunction<
   RestEndpointMethods['actions']['downloadArtifact']
@@ -32,10 +39,16 @@ const fixtures = {
    ]
  },
  artifactID: 1234,
+  artifactName: 'my-artifact',
+  artifactSize: 123456,
  repositoryOwner: 'actions',
  repositoryName: 'toolkit',
  token: 'ghp_1234567890',
-  blobStorageUrl: 'https://blob-storage.local?signed=true'
+  blobStorageUrl: 'https://blob-storage.local?signed=true',
+  backendIds: {
+    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
+    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
+  }
 }

 jest.mock('@actions/github', () => ({
@ -74,206 +87,536 @@ const expectExtractedArchive = async (dir: string): Promise<void> => {
|
|||
}
|
||||
}
|
||||
|
||||
const setup = async (): Promise<void> => {
|
||||
noopLogs()
|
||||
await fs.promises.mkdir(testDir, {recursive: true})
|
||||
await createTestArchive()
|
||||
|
||||
process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
|
||||
}
|
||||
|
||||
const cleanup = async (): Promise<void> => {
|
||||
jest.restoreAllMocks()
|
||||
await fs.promises.rm(testDir, {recursive: true})
|
||||
delete process.env['GITHUB_WORKSPACE']
|
||||
}
|
||||
|
||||
const mockGetArtifactSuccess = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 200
|
||||
message.push(fs.readFileSync(fixtures.exampleArtifact.path))
|
||||
message.push(null)
|
||||
return {
|
||||
message
|
||||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactFailure = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 500
|
||||
message.push('Internal Server Error')
|
||||
message.push(null)
|
||||
return {
|
||||
message
|
||||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactMalicious = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 200
|
||||
message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains files that are formatted x/../../etc/hosts
|
||||
message.push(null)
|
||||
return {
|
||||
message
|
||||
}
|
||||
})
|
||||
|
||||
describe('download-artifact', () => {
|
||||
beforeEach(async () => {
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
describe('public', () => {
|
||||
beforeEach(setup)
|
||||
afterEach(cleanup)
|
||||
|
||||
await fs.promises.mkdir(testDir, {recursive: true})
|
||||
await createTestArchive()
|
||||
it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
|
||||
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
|
||||
.actions.downloadArtifact as MockedDownloadArtifact
|
||||
downloadArtifactMock.mockResolvedValueOnce({
|
||||
headers: {
|
||||
location: fixtures.blobStorageUrl
|
||||
},
|
||||
status: 302,
|
||||
url: '',
|
||||
data: Buffer.from('')
|
||||
})
|
||||
|
||||
process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
|
||||
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
|
||||
() => {
|
||||
return {
|
||||
get: mockGetArtifactSuccess
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
const response = await downloadArtifactPublic(
|
||||
fixtures.artifactID,
|
||||
fixtures.repositoryOwner,
|
||||
fixtures.repositoryName,
|
||||
fixtures.token
|
||||
)
|
||||
|
||||
expect(downloadArtifactMock).toHaveBeenCalledWith({
|
||||
owner: fixtures.repositoryOwner,
|
||||
repo: fixtures.repositoryName,
|
||||
artifact_id: fixtures.artifactID,
|
||||
archive_format: 'zip',
|
||||
request: {
|
||||
redirect: 'manual'
|
||||
}
|
||||
})
|
||||
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
|
||||
expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
|
||||
fixtures.blobStorageUrl
|
||||
)
|
||||
expectExtractedArchive(fixtures.workspaceDir)
|
||||
expect(response.downloadPath).toBe(fixtures.workspaceDir)
|
||||
})
|
||||
|
||||
    it('should not allow path traversal from malicious artifacts', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactMalicious
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })

      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )

      // ensure path traversal was not possible
      expect(
        fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
      ).toBe(true)
      expect(
        fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
      ).toBe(true)

      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    })

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token,
        {
          path: customPath
        }
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expectExtractedArchive(customPath)
      expect(response.downloadPath).toBe(customPath)
    })

    it('should fail if download artifact API does not respond with location', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {},
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
    })

    it('should fail if blob storage chunk does not respond within 30s', async () => {
      // mock http client to delay response data by 30s
      const msg = new http.IncomingMessage(new net.Socket())
      msg.statusCode = 200

      const mockGet = jest.fn(async () => {
        return new Promise((resolve, reject) => {
          // Resolve with a 200 status code immediately
          resolve({
            message: msg,
            readBody: async () => {
              return Promise.resolve(`{"ok": true}`)
            }
          })

          // Reject with an error after 31 seconds
          setTimeout(() => {
            reject(new Error('Request timeout'))
          }, 31000) // Timeout after 31 seconds
        })
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGet
          }
        }
      )

      await expect(
        streamExtractExternal(fixtures.blobStorageUrl, fixtures.workspaceDir)
      ).rejects.toBeInstanceOf(Error)

      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
    }, 35000) // add longer timeout to allow for timer to run out

    it('should fail if blob storage response is non-200 after 5 retries', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactFailure
          }
        }
      )

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactFailure).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactFailure).toHaveBeenCalledTimes(5)
    }, 38000)

    it('should retry if blob storage response is non-200 and then succeed with a 200', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockGetArtifact = jest
        .fn(mockGetArtifactSuccess)
        .mockImplementationOnce(mockGetArtifactFailure)

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifact
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactFailure).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactFailure).toHaveBeenCalledTimes(1)
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactSuccess).toHaveBeenCalledTimes(1)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    }, 28000)
  })

  afterEach(async () => {
    jest.restoreAllMocks()
    await fs.promises.rm(testDir, {recursive: true})
    delete process.env['GITHUB_WORKSPACE']
  })

  describe('internal', () => {
    beforeEach(async () => {
      await setup()

      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)

      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })

    afterEach(async () => {
      await cleanup()
    })

    it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactInternal(fixtures.artifactID)

      expectExtractedArchive(fixtures.workspaceDir)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactInternal(fixtures.artifactID, {
        path: customPath
      })

      expectExtractedArchive(customPath)
      expect(response.downloadPath).toBe(customPath)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })

    it('should fail if download artifact API does not respond with location', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockRejectedValue(new Error('boom'))

      await expect(
        downloadArtifactInternal(fixtures.artifactID)
      ).rejects.toBeInstanceOf(Error)
    })

    it('should fail if blob storage response is non-200', async () => {
      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactFailure
          }
        }
      )

      await expect(
        downloadArtifactInternal(fixtures.artifactID)
      ).rejects.toBeInstanceOf(Error)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })
  })
})

Binary file not shown.

@ -0,0 +1,239 @@
import * as github from '@actions/github'
import type {RequestInterface} from '@octokit/types'
import {
  getArtifactInternal,
  getArtifactPublic
} from '../src/internal/find/get-artifact'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {
  ArtifactNotFoundError,
  InvalidResponseError
} from '../src/internal/shared/errors'

type MockedRequest = jest.MockedFunction<RequestInterface<object>>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    request: jest.fn()
  })
}))

const fixtures = {
  repo: 'toolkit',
  owner: 'actions',
  token: 'ghp_1234567890',
  runId: 123,
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  },
  artifacts: [
    {
      id: 1,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-01')
    },
    {
      id: 2,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-02')
    }
  ]
}

describe('get-artifact', () => {
  beforeAll(() => {
    noopLogs()
  })

  describe('public', () => {
    it('should return the artifact if it is found', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: [
            {
              name: fixtures.artifacts[0].name,
              id: fixtures.artifacts[0].id,
              size_in_bytes: fixtures.artifacts[0].size,
              created_at: fixtures.artifacts[0].createdAt.toISOString()
            }
          ]
        }
      })

      const response = await getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      expect(response).toEqual({
        artifact: fixtures.artifacts[0]
      })
    })

    it('should return the latest artifact if multiple are found', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: fixtures.artifacts.map(artifact => ({
            name: artifact.name,
            id: artifact.id,
            size_in_bytes: artifact.size,
            created_at: artifact.createdAt.toISOString()
          }))
        }
      })

      const response = await getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      expect(response).toEqual({
        artifact: fixtures.artifacts[1]
      })
    })

    it('should fail if no artifacts are found', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: []
        }
      })

      const response = getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      await expect(response).rejects.toThrowError(ArtifactNotFoundError)
    })

    it('should fail if non-200 response', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 404,
        headers: {},
        url: '',
        data: {}
      })

      const response = getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      await expect(response).rejects.toThrowError(InvalidResponseError)
    })
  })

  describe('internal', () => {
    beforeEach(() => {
      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)

      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })

    it('should return the artifact if it is found', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifacts[0].id.toString(),
              name: fixtures.artifacts[0].name,
              size: fixtures.artifacts[0].size.toString(),
              createdAt: Timestamp.fromDate(fixtures.artifacts[0].createdAt)
            }
          ]
        })

      const response = await getArtifactInternal(fixtures.artifacts[0].name)

      expect(response).toEqual({
        artifact: fixtures.artifacts[0]
      })
    })

    it('should return the latest artifact if multiple are found', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })

      const response = await getArtifactInternal(fixtures.artifacts[0].name)

      expect(response).toEqual({
        artifact: fixtures.artifacts[1]
      })
    })

    it('should fail if no artifacts are found', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: []
        })

      const response = getArtifactInternal(fixtures.artifacts[0].name)

      await expect(response).rejects.toThrowError(ArtifactNotFoundError)
    })

    it('should fail if non-200 response', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockRejectedValue(new Error('boom'))

      const response = getArtifactInternal(fixtures.artifacts[0].name)

      await expect(response).rejects.toThrow()
    })
  })
})

@ -0,0 +1,242 @@
import * as github from '@actions/github'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
import {
  listArtifactsInternal,
  listArtifactsPublic
} from '../src/internal/find/list-artifacts'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {Artifact} from '../src/internal/shared/interfaces'

type MockedListWorkflowRunArtifacts = jest.MockedFunction<
  RestEndpointMethods['actions']['listWorkflowRunArtifacts']
>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    rest: {
      actions: {
        listWorkflowRunArtifacts: jest.fn()
      }
    }
  })
}))

const artifactsToListResponse = (
  artifacts: Artifact[]
): RestEndpointMethodTypes['actions']['listWorkflowRunArtifacts']['response']['data'] => {
  return {
    total_count: artifacts.length,
    artifacts: artifacts.map(artifact => ({
      name: artifact.name,
      id: artifact.id,
      size_in_bytes: artifact.size,
      created_at: artifact.createdAt?.toISOString() || '',
      run_id: fixtures.runId,
      // unused fields for tests
      url: '',
      archive_download_url: '',
      expired: false,
      expires_at: '',
      node_id: '',
      run_url: '',
      type: '',
      updated_at: ''
    }))
  }
}

const fixtures = {
  repo: 'toolkit',
  owner: 'actions',
  token: 'ghp_1234567890',
  runId: 123,
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  },
  artifacts: [
    {
      id: 1,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-01')
    },
    {
      id: 2,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-02')
    }
  ]
}

describe('list-artifact', () => {
  beforeAll(() => {
    noopLogs()
  })

  describe('public', () => {
    it('should return a list of artifacts', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts

      mockListArtifacts.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: artifactsToListResponse(fixtures.artifacts)
      })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        false
      )

      expect(response).toEqual({
        artifacts: fixtures.artifacts
      })
    })

    it('should return the latest artifact when latest is specified', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts

      mockListArtifacts.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: artifactsToListResponse(fixtures.artifacts)
      })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        true
      )

      expect(response).toEqual({
        artifacts: [fixtures.artifacts[1]]
      })
    })

    it('can return empty artifacts', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts

      mockListArtifacts.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          total_count: 0,
          artifacts: []
        }
      })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        true
      )

      expect(response).toEqual({
        artifacts: []
      })
    })

    it('should fail if non-200 response', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts

      mockListArtifacts.mockRejectedValue(new Error('boom'))

      await expect(
        listArtifactsPublic(
          fixtures.runId,
          fixtures.owner,
          fixtures.repo,
          fixtures.token,
          false
        )
      ).rejects.toThrow('boom')
    })
  })

  describe('internal', () => {
    beforeEach(() => {
      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)
      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })

    it('should return a list of artifacts', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })
      const response = await listArtifactsInternal(false)
      expect(response).toEqual({
        artifacts: fixtures.artifacts
      })
    })

    it('should return the latest artifact when latest is specified', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })
      const response = await listArtifactsInternal(true)
      expect(response).toEqual({
        artifacts: [fixtures.artifacts[1]]
      })
    })

    it('can return empty artifacts', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: []
        })
      const response = await listArtifactsInternal(false)
      expect(response).toEqual({
        artifacts: []
      })
    })

    it('should fail if non-200 response', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockRejectedValue(new Error('boom'))
      await expect(listArtifactsInternal(false)).rejects.toThrow('boom')
    })
  })
})

@ -3,15 +3,11 @@ import {
  validateFilePath
} from '../src/internal/upload/path-and-artifact-name-validation'

import * as core from '@actions/core'
import {noopLogs} from './common'

describe('Path and artifact name validation', () => {
  beforeAll(() => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
    noopLogs()
  })

  it('Check Artifact Name for any invalid characters', () => {

@ -1,4 +1,3 @@
import * as core from '@actions/core'
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'

@ -7,14 +6,15 @@ import * as config from '../src/internal/shared/config'
import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'
import {FilesNotFoundError} from '../src/internal/shared/errors'
import {BlockBlobClient} from '@azure/storage-blob'
import * as fs from 'fs'
import * as path from 'path'

describe('upload-artifact', () => {
  beforeEach(() => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
    noopLogs()
  })

  afterEach(() => {

@ -63,7 +63,6 @@ describe('upload-artifact', () => {
    )
    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
      Promise.resolve({
        isSuccess: true,
        uploadSize: 1234,
        sha256Hash: 'test-sha256-hash'
      })

@ -88,7 +87,7 @@ describe('upload-artifact', () => {
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).resolves.toEqual({success: true, size: 1234, id: 1})
    expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
  })

  it('should throw an error if the root directory is invalid', () => {

@ -111,7 +110,7 @@ describe('upload-artifact', () => {
    expect(uploadResp).rejects.toThrow('Invalid root directory')
  })

  it('should return false if there are no files to upload', () => {
  it('should reject if there are no files to upload', () => {
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()

@ -128,10 +127,10 @@ describe('upload-artifact', () => {
      ],
      '/home/user/files/plz-upload'
    )
    expect(uploadResp).resolves.toEqual({success: false})
    expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
  })

  it('should return false if no backend IDs are found', () => {
  it('should reject if no backend IDs are found', () => {
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()

@ -155,9 +154,6 @@ describe('upload-artifact', () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest
      .spyOn(util, 'getBackendIdsFromToken')
      .mockReturnValue({workflowRunBackendId: '', workflowJobRunBackendId: ''})

    const uploadResp = uploadArtifact(
      'test-artifact',

@ -169,7 +165,7 @@ describe('upload-artifact', () => {
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).resolves.toEqual({success: false})
    expect(uploadResp).rejects.toThrow()
  })

  it('should return false if the creation request fails', () => {

@ -224,7 +220,7 @@ describe('upload-artifact', () => {
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).resolves.toEqual({success: false})
    expect(uploadResp).rejects.toThrow()
  })

  it('should return false if blob storage upload is unsuccessful', () => {

@ -269,7 +265,7 @@ describe('upload-artifact', () => {
    )
    jest
      .spyOn(blobUpload, 'uploadZipToBlobStorage')
      .mockReturnValue(Promise.resolve({isSuccess: false}))
      .mockReturnValue(Promise.reject(new Error('boom')))

    // ArtifactHttpClient mocks
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

@ -287,10 +283,10 @@ describe('upload-artifact', () => {
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).resolves.toEqual({success: false})
    expect(uploadResp).rejects.toThrow()
  })

  it('should return false if finalize artifact fails', () => {
  it('should reject if finalize artifact fails', () => {
    const mockDate = new Date('2020-01-01')
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')

@ -332,7 +328,6 @@ describe('upload-artifact', () => {
    )
    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
      Promise.resolve({
        isSuccess: true,
        uploadSize: 1234,
        sha256Hash: 'test-sha256-hash'
      })

@ -357,6 +352,96 @@ describe('upload-artifact', () => {
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).resolves.toEqual({success: false})
    expect(uploadResp).rejects.toThrow()
  })

  it('should throw an error if uploading blob chunks gets delayed', async () => {
    const mockDate = new Date('2020-01-01')
    const dirPath = path.join(__dirname, `plz-upload`)
    if (!fs.existsSync(dirPath)) {
      fs.mkdirSync(dirPath, {recursive: true})
    }

    fs.writeFileSync(path.join(dirPath, 'file1.txt'), 'test file content')
    fs.writeFileSync(path.join(dirPath, 'file2.txt'), 'test file content')
    fs.writeFileSync(path.join(dirPath, 'file3.txt'), 'test file content')

    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue([
        {
          sourcePath: path.join(dirPath, 'file1.txt'),
          destinationPath: 'file1.txt'
        },
        {
          sourcePath: path.join(dirPath, 'file2.txt'),
          destinationPath: 'file2.txt'
        },
        {
          sourcePath: path.join(dirPath, 'file3.txt'),
          destinationPath: 'dir/file3.txt'
        }
      ])

    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678'
    })
    jest
      .spyOn(retention, 'getExpiration')
      .mockReturnValue(Timestamp.fromDate(mockDate))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.com'
        })
      )
    jest
      .spyOn(blobUpload, 'uploadZipToBlobStorage')
      .mockReturnValue(Promise.reject(new Error('Upload progress stalled.')))

    // ArtifactHttpClient mocks
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://test-url.com')

    BlockBlobClient.prototype.uploadStream = jest
      .fn()
      .mockImplementation(
        async (stream, bufferSize, maxConcurrency, options) => {
          return new Promise<void>(resolve => {
            // Call the onProgress callback with a progress event
            options.onProgress({loadedBytes: 0})

            // Wait for 31 seconds before resolving the promise
            setTimeout(() => {
              // Call the onProgress callback again to simulate progress
              options.onProgress({loadedBytes: 100})
              resolve()
            }, 31000) // Delay longer than your timeout
          })
        }
      )

    jest.mock('fs')
    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
    )

    await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
  })
})

@ -1,11 +1,11 @@
import * as io from '../../io/src/io'
import * as path from 'path'
import {promises as fs} from 'fs'
import * as core from '@actions/core'
import {
  getUploadZipSpecification,
  validateRootDirectory
} from '../src/internal/upload/upload-zip-specification'
import {noopLogs} from './common'

const root = path.join(__dirname, '_temp', 'upload-specification')
const goodItem1Path = path.join(

@ -51,11 +51,7 @@ const artifactFilesToUpload = [

describe('Search', () => {
  beforeAll(async () => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
    noopLogs()

    // clear temp directory
    await io.rmRF(root)

@ -1,13 +1,12 @@
import * as config from '../src/internal/shared/config'
import * as util from '../src/internal/shared/util'

export const testRuntimeToken =
  'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'

describe('get-backend-ids-from-token', () => {
  it('should return backend ids when the token is valid', () => {
    jest
      .spyOn(config, 'getRuntimeToken')
      .mockReturnValue(
        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'
      )
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(testRuntimeToken)

    const backendIds = util.getBackendIdsFromToken()
    expect(backendIds.workflowRunBackendId).toBe(

@ -1 +0,0 @@
Docs will be added here once development of version `2.0.0` has finished

@ -0,0 +1,62 @@
# Frequently Asked Questions

- [Frequently Asked Questions](#frequently-asked-questions)
  - [Supported Characters](#supported-characters)
  - [Compression? ZIP? How is my artifact stored?](#compression-zip-how-is-my-artifact-stored)
  - [Which versions of the artifacts packages are compatible?](#which-versions-of-the-artifacts-packages-are-compatible)
  - [How long will my artifact be available?](#how-long-will-my-artifact-be-available)

## Supported Characters

When uploading an artifact, the `name` parameter and the files specified in `files` cannot contain any of the following characters. If any are present in `name` or `files`, the artifact will be rejected by the server and the upload will fail. These characters are not allowed due to limitations and restrictions of certain file systems such as NTFS; to maintain platform-agnostic behavior, a character that is unsupported on any one filesystem/platform is not supported on any of them.

- "
- :
- <
- \>
- |
- \*
- ?

In addition to the aforementioned characters, the `name` also cannot include the following:

- \
- /
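
As a rough illustration of the restriction above, a client-side pre-check might look like the following sketch (`findInvalidChars` is a hypothetical helper written for this FAQ, not the package's internal validator):

```typescript
// Characters rejected by the artifact service (see the lists above).
const invalidFileChars = ['"', ':', '<', '>', '|', '*', '?']
const invalidNameChars = [...invalidFileChars, '\\', '/']

// Returns the offending characters so callers can produce a useful error.
function findInvalidChars(value: string, invalid: string[]): string[] {
  return invalid.filter(c => value.includes(c))
}

const bad = findInvalidChars('my:artifact?', invalidNameChars)
if (bad.length > 0) {
  throw new Error(`Artifact name contains invalid characters: ${bad.join(' ')}`)
}
```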

## Compression? ZIP? How is my artifact stored?

When creating an Artifact, the files are dynamically compressed and streamed into a ZIP archive. Since they are stored in a ZIP, they can be compressed by Zlib in varying levels.

The value can range from 0 to 9:

- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression

Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
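
For example, the compression level can be passed through the upload options (a minimal sketch using the v2 client; check your installed version's typings for the exact option name):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

// Large, already-compressed files (videos, archives): skip compression entirely.
await artifact.uploadArtifact('my-artifact', ['large-file.mp4'], '.', {
  compressionLevel: 0
})
```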

## Which versions of the artifacts packages are compatible?

[actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact) leverage the [GitHub Actions toolkit](https://github.com/actions/toolkit) and are typically used together to upload and download artifacts in your workflows.

| upload-artifact | download-artifact | toolkit |
|---|---|---|
| v4 | v4 | v2 |
| < v3 | < v3 | < v1 |

Use matching versions of `actions/upload-artifact` and `actions/download-artifact` to ensure compatibility.

In your GitHub Actions workflow YAML file, you specify the version of the actions you want to use. For example:

```yaml
uses: actions/upload-artifact@v4
# ...
uses: actions/download-artifact@v4
# ...
```

**Release Notes:**
Check the release notes for each repository to see if there are any specific notes about compatibility or changes in behavior.

## How long will my artifact be available?

The default retention period is **90 days**. For more information, visit: https://github.com/actions/upload-artifact?tab=readme-ov-file#retention-period
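
A shorter retention can also be requested per upload via the `retentionDays` upload option (a sketch; the requested value is capped by the repository's configured maximum):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

// Keep this artifact for 7 days instead of the 90-day default.
await artifact.uploadArtifact('nightly-build', ['dist/app.zip'], '.', {
  retentionDays: 7
})
```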

@ -0,0 +1,43 @@
@actions/artifact

# @actions/artifact

## Table of contents

### Classes

- [ArtifactNotFoundError](classes/ArtifactNotFoundError.md)
- [DefaultArtifactClient](classes/DefaultArtifactClient.md)
- [FilesNotFoundError](classes/FilesNotFoundError.md)
- [GHESNotSupportedError](classes/GHESNotSupportedError.md)
- [InvalidResponseError](classes/InvalidResponseError.md)
- [NetworkError](classes/NetworkError.md)
- [UsageError](classes/UsageError.md)

### Interfaces

- [Artifact](interfaces/Artifact.md)
- [ArtifactClient](interfaces/ArtifactClient.md)
- [DeleteArtifactResponse](interfaces/DeleteArtifactResponse.md)
- [DownloadArtifactOptions](interfaces/DownloadArtifactOptions.md)
- [DownloadArtifactResponse](interfaces/DownloadArtifactResponse.md)
- [FindOptions](interfaces/FindOptions.md)
- [GetArtifactResponse](interfaces/GetArtifactResponse.md)
- [ListArtifactsOptions](interfaces/ListArtifactsOptions.md)
- [ListArtifactsResponse](interfaces/ListArtifactsResponse.md)
- [UploadArtifactOptions](interfaces/UploadArtifactOptions.md)
- [UploadArtifactResponse](interfaces/UploadArtifactResponse.md)

### Variables

- [default](README.md#default)

## Variables

### default

• `Const` **default**: [`ArtifactClient`](interfaces/ArtifactClient.md)

#### Defined in

[src/artifact.ts:7](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/artifact.ts#L7)
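
A minimal usage sketch of the default export, which is a ready-made [`ArtifactClient`](interfaces/ArtifactClient.md):

```typescript
import artifact from '@actions/artifact'

// The default export can be used directly, without constructing a client.
const {artifacts} = await artifact.listArtifacts()
console.log(`found ${artifacts.length} artifacts`)
```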

@ -0,0 +1,169 @@
[@actions/artifact](../README.md) / ArtifactNotFoundError

# Class: ArtifactNotFoundError

## Hierarchy

- `Error`

  ↳ **`ArtifactNotFoundError`**

## Table of contents

### Constructors

- [constructor](ArtifactNotFoundError.md#constructor)

### Properties

- [message](ArtifactNotFoundError.md#message)
- [name](ArtifactNotFoundError.md#name)
- [stack](ArtifactNotFoundError.md#stack)
- [prepareStackTrace](ArtifactNotFoundError.md#preparestacktrace)
- [stackTraceLimit](ArtifactNotFoundError.md#stacktracelimit)

### Methods

- [captureStackTrace](ArtifactNotFoundError.md#capturestacktrace)

## Constructors

### constructor

• **new ArtifactNotFoundError**(`message?`): [`ArtifactNotFoundError`](ArtifactNotFoundError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'Artifact not found'` |

#### Returns

[`ArtifactNotFoundError`](ArtifactNotFoundError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L24)
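
A sketch of how a caller might distinguish this error from other failures (assuming the class is imported from the package root, as the README's class list suggests):

```typescript
import artifact, {ArtifactNotFoundError} from '@actions/artifact'

try {
  await artifact.getArtifact('maybe-missing')
} catch (err) {
  if (err instanceof ArtifactNotFoundError) {
    // Expected when the artifact does not exist in this workflow run.
    console.log(err.message)
  } else {
    throw err
  }
}
```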

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

@ -0,0 +1,193 @@
[@actions/artifact](../README.md) / DefaultArtifactClient

# Class: DefaultArtifactClient

The default artifact client that is used by the artifact action(s).

## Implements

- [`ArtifactClient`](../interfaces/ArtifactClient.md)

## Table of contents

### Constructors

- [constructor](DefaultArtifactClient.md#constructor)

### Methods

- [deleteArtifact](DefaultArtifactClient.md#deleteartifact)
- [downloadArtifact](DefaultArtifactClient.md#downloadartifact)
- [getArtifact](DefaultArtifactClient.md#getartifact)
- [listArtifacts](DefaultArtifactClient.md#listartifacts)
- [uploadArtifact](DefaultArtifactClient.md#uploadartifact)

## Constructors

### constructor

• **new DefaultArtifactClient**(): [`DefaultArtifactClient`](DefaultArtifactClient.md)

#### Returns

[`DefaultArtifactClient`](DefaultArtifactClient.md)
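
Construction takes no arguments; a typical setup (reused by the method examples below) looks like:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

// One client instance can be reused for all artifact operations in a job.
const artifact = new DefaultArtifactClient()
```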

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>

Delete an Artifact

If `options.findBy` is specified, this will use the public Delete Artifact API: https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>

single DeleteArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[deleteArtifact](../interfaces/ArtifactClient.md#deleteartifact)

#### Defined in

[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L248)
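
A hedged usage sketch; the `findBy` field names and values below are illustrative placeholders, so check [`FindOptions`](../interfaces/FindOptions.md) for the exact shape:

```typescript
// Delete by name within the current workflow run.
await artifact.deleteArtifact('my-artifact')

// Or target another run via the public API (hypothetical values).
await artifact.deleteArtifact('my-artifact', {
  findBy: {
    token: process.env.GITHUB_TOKEN!,
    workflowRunId: 123,
    repositoryOwner: 'actions',
    repositoryName: 'toolkit'
  }
})
```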

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>

Downloads an artifact and unzips the content.

If `options.findBy` is specified, this will use the public Download Artifact API: https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](../interfaces/DownloadArtifactOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

`Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>

single DownloadArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[downloadArtifact](../interfaces/ArtifactClient.md#downloadartifact)

#### Defined in

[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L138)
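
For example (a sketch; `artifact` is the client from the constructor example, the id and path are illustrative):

```typescript
// Download and extract into a specific directory instead of $GITHUB_WORKSPACE.
const {downloadPath} = await artifact.downloadArtifact(1234, {
  path: '/tmp/my-artifact'
})
console.log(`extracted to ${downloadPath}`)
```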

___

### getArtifact

▸ **getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>

Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.

If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
It is still possible to have multiple artifacts with the same name in the same workflow run when using older versions of upload-artifact (v1, v2 and v3), `@actions/artifact` < v2, or when the run is a rerun.
If there are multiple artifacts with the same name in the same workflow run, this function will return the latest artifact that matches the name.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the get behavior |

#### Returns

`Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[getArtifact](../interfaces/ArtifactClient.md#getartifact)

#### Defined in

[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L212)
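
As a sketch, looking an artifact up by name (assuming the default client export):

```typescript
import artifactClient from '@actions/artifact'

async function resolveArtifactId(name: string): Promise<number> {
  // Throws if no artifact with this name exists in the current run
  const {artifact} = await artifactClient.getArtifact(name)
  return artifact.id
}
```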

___

### listArtifacts

▸ **listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>

Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.

If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](../interfaces/ListArtifactsOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the list behavior |

#### Returns

`Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>

ListArtifactsResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[listArtifacts](../interfaces/ArtifactClient.md#listartifacts)

#### Defined in

[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L176)
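
A minimal sketch of enumerating the current run's artifacts, assuming the default client export:

```typescript
import artifactClient from '@actions/artifact'

async function printArtifacts(): Promise<void> {
  const {artifacts} = await artifactClient.listArtifacts()
  for (const artifact of artifacts) {
    console.log(`${artifact.name} (#${artifact.id}): ${artifact.size} bytes`)
  }
}
```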

___

### uploadArtifact

▸ **uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>

Uploads an artifact.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](../interfaces/UploadArtifactOptions.md) | Extra options for customizing the upload behavior |

#### Returns

`Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>

single UploadArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[uploadArtifact](../interfaces/ArtifactClient.md#uploadartifact)

#### Defined in

[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L113)
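
A usage sketch, assuming the default client export; the file paths are hypothetical:

```typescript
import artifactClient from '@actions/artifact'

async function uploadReport(): Promise<void> {
  const files = ['out/report.txt', 'out/summary.json']
  // Paths inside the archive are computed relative to `rootDirectory`
  const {id, size} = await artifactClient.uploadArtifact('my-artifact', files, 'out')
  console.log(`Uploaded artifact ${id} (${size} bytes)`)
}
```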

@ -0,0 +1,180 @@

[@actions/artifact](../README.md) / FilesNotFoundError

# Class: FilesNotFoundError

## Hierarchy

- `Error`

  ↳ **`FilesNotFoundError`**

## Table of contents

### Constructors

- [constructor](FilesNotFoundError.md#constructor)

### Properties

- [files](FilesNotFoundError.md#files)
- [message](FilesNotFoundError.md#message)
- [name](FilesNotFoundError.md#name)
- [stack](FilesNotFoundError.md#stack)
- [prepareStackTrace](FilesNotFoundError.md#preparestacktrace)
- [stackTraceLimit](FilesNotFoundError.md#stacktracelimit)

### Methods

- [captureStackTrace](FilesNotFoundError.md#capturestacktrace)

## Constructors

### constructor

• **new FilesNotFoundError**(`files?`): [`FilesNotFoundError`](FilesNotFoundError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `files` | `string`[] | `[]` |

#### Returns

[`FilesNotFoundError`](FilesNotFoundError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L4)

## Properties

### files

• **files**: `string`[]

#### Defined in

[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L2)
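
A hypothetical sketch of inspecting this error after a failed upload (assuming the error is re-exported from the package root, as the `artifact.ts` change later in this diff suggests, and that uploads of missing files raise it):

```typescript
import artifactClient, {FilesNotFoundError} from '@actions/artifact'

async function uploadOrExplain(): Promise<void> {
  try {
    await artifactClient.uploadArtifact('my-artifact', ['missing.txt'], '.')
  } catch (err) {
    if (err instanceof FilesNotFoundError) {
      // `files` lists the paths that could not be found
      console.error(`Files not found: ${err.files.join(', ')}`)
    }
    throw err
  }
}
```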

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

@ -0,0 +1,169 @@

[@actions/artifact](../README.md) / GHESNotSupportedError

# Class: GHESNotSupportedError

## Hierarchy

- `Error`

  ↳ **`GHESNotSupportedError`**

## Table of contents

### Constructors

- [constructor](GHESNotSupportedError.md#constructor)

### Properties

- [message](GHESNotSupportedError.md#message)
- [name](GHESNotSupportedError.md#name)
- [stack](GHESNotSupportedError.md#stack)
- [prepareStackTrace](GHESNotSupportedError.md#preparestacktrace)
- [stackTraceLimit](GHESNotSupportedError.md#stacktracelimit)

### Methods

- [captureStackTrace](GHESNotSupportedError.md#capturestacktrace)

## Constructors

### constructor

• **new GHESNotSupportedError**(`message?`): [`GHESNotSupportedError`](GHESNotSupportedError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'` |

#### Returns

[`GHESNotSupportedError`](GHESNotSupportedError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L31)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

@ -0,0 +1,169 @@

[@actions/artifact](../README.md) / InvalidResponseError

# Class: InvalidResponseError

## Hierarchy

- `Error`

  ↳ **`InvalidResponseError`**

## Table of contents

### Constructors

- [constructor](InvalidResponseError.md#constructor)

### Properties

- [message](InvalidResponseError.md#message)
- [name](InvalidResponseError.md#name)
- [stack](InvalidResponseError.md#stack)
- [prepareStackTrace](InvalidResponseError.md#preparestacktrace)
- [stackTraceLimit](InvalidResponseError.md#stacktracelimit)

### Methods

- [captureStackTrace](InvalidResponseError.md#capturestacktrace)

## Constructors

### constructor

• **new InvalidResponseError**(`message`): [`InvalidResponseError`](InvalidResponseError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `message` | `string` |

#### Returns

[`InvalidResponseError`](InvalidResponseError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L17)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

@ -0,0 +1,201 @@

[@actions/artifact](../README.md) / NetworkError

# Class: NetworkError

## Hierarchy

- `Error`

  ↳ **`NetworkError`**

## Table of contents

### Constructors

- [constructor](NetworkError.md#constructor)

### Properties

- [code](NetworkError.md#code)
- [message](NetworkError.md#message)
- [name](NetworkError.md#name)
- [stack](NetworkError.md#stack)
- [prepareStackTrace](NetworkError.md#preparestacktrace)
- [stackTraceLimit](NetworkError.md#stacktracelimit)

### Methods

- [captureStackTrace](NetworkError.md#capturestacktrace)
- [isNetworkErrorCode](NetworkError.md#isnetworkerrorcode)

## Constructors

### constructor

• **new NetworkError**(`code`): [`NetworkError`](NetworkError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `code` | `string` |

#### Returns

[`NetworkError`](NetworkError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L42)

## Properties

### code

• **code**: `string`

#### Defined in

[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L40)

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isNetworkErrorCode

▸ **isNetworkErrorCode**(`code?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `code?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L49)
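
A sketch of handling network failures with this class (names assume the root re-export):

```typescript
import artifactClient, {NetworkError} from '@actions/artifact'

async function downloadWithDiagnostics(id: number): Promise<void> {
  try {
    await artifactClient.downloadArtifact(id)
  } catch (err) {
    if (err instanceof NetworkError) {
      // `code` carries the underlying system error code, e.g. ECONNRESET
      console.error(`Network failure: ${err.code}`)
    }
    throw err
  }
}
```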

@ -0,0 +1,184 @@

[@actions/artifact](../README.md) / UsageError

# Class: UsageError

## Hierarchy

- `Error`

  ↳ **`UsageError`**

## Table of contents

### Constructors

- [constructor](UsageError.md#constructor)

### Properties

- [message](UsageError.md#message)
- [name](UsageError.md#name)
- [stack](UsageError.md#stack)
- [prepareStackTrace](UsageError.md#preparestacktrace)
- [stackTraceLimit](UsageError.md#stacktracelimit)

### Methods

- [captureStackTrace](UsageError.md#capturestacktrace)
- [isUsageErrorMessage](UsageError.md#isusageerrormessage)

## Constructors

### constructor

• **new UsageError**(): [`UsageError`](UsageError.md)

#### Returns

[`UsageError`](UsageError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L62)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isUsageErrorMessage

▸ **isUsageErrorMessage**(`msg?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `msg?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L68)

@ -0,0 +1,62 @@

[@actions/artifact](../README.md) / Artifact

# Interface: Artifact

An Actions Artifact

## Table of contents

### Properties

- [createdAt](Artifact.md#createdat)
- [id](Artifact.md#id)
- [name](Artifact.md#name)
- [size](Artifact.md#size)

## Properties

### createdAt

• `Optional` **createdAt**: `Date`

The time when the artifact was created

#### Defined in

[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L123)

___

### id

• **id**: `number`

The ID of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L113)

___

### name

• **name**: `string`

The name of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L108)

___

### size

• **size**: `number`

The size of the artifact in bytes

#### Defined in

[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L118)
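
For illustration, a value satisfying this interface (all data below is made up):

```typescript
import type {Artifact} from '@actions/artifact'

const example: Artifact = {
  id: 1234,
  name: 'my-artifact',
  size: 2048,           // bytes
  createdAt: new Date() // optional
}
```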

@ -0,0 +1,159 @@

[@actions/artifact](../README.md) / ArtifactClient

# Interface: ArtifactClient

Generic interface for the artifact client.

## Implemented by

- [`DefaultArtifactClient`](../classes/DefaultArtifactClient.md)

## Table of contents

### Methods

- [deleteArtifact](ArtifactClient.md#deleteartifact)
- [downloadArtifact](ArtifactClient.md#downloadartifact)
- [getArtifact](ArtifactClient.md#getartifact)
- [listArtifacts](ArtifactClient.md#listartifacts)
- [uploadArtifact](ArtifactClient.md#uploadartifact)

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

Delete an Artifact

If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

single DeleteArtifactResponse object

#### Defined in

[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L103)

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

Downloads an artifact and unzips the content.

If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](DownloadArtifactOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

`Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

single DownloadArtifactResponse object

#### Defined in

[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L89)

___

### getArtifact

▸ **getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>

Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.

If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
It is still possible to have multiple artifacts with the same name in the same workflow run when using older versions of upload-artifact (v1, v2 and v3), `@actions/artifact` < v2, or when the run is a rerun.
If there are multiple artifacts with the same name in the same workflow run, this function will return the latest artifact that matches the name.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the get behavior |

#### Returns

`Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>

#### Defined in

[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L75)

___

### listArtifacts

▸ **listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>

Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.

If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](ListArtifactsOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the list behavior |

#### Returns

`Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>

ListArtifactsResponse object

#### Defined in

[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L57)

___

### uploadArtifact

▸ **uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>

Uploads an artifact.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](UploadArtifactOptions.md) | Extra options for customizing the upload behavior |

#### Returns

`Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>

single UploadArtifactResponse object

#### Defined in

[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L40)

@ -0,0 +1,23 @@

[@actions/artifact](../README.md) / DeleteArtifactResponse

# Interface: DeleteArtifactResponse

Response from the server when deleting an artifact

## Table of contents

### Properties

- [id](DeleteArtifactResponse.md#id)

## Properties

### id

• **id**: `number`

The id of the artifact that was deleted

#### Defined in

[src/internal/shared/interfaces.ts:158](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L158)

@ -0,0 +1,23 @@

[@actions/artifact](../README.md) / DownloadArtifactOptions

# Interface: DownloadArtifactOptions

Options for downloading an artifact

## Table of contents

### Properties

- [path](DownloadArtifactOptions.md#path)

## Properties

### path

• `Optional` **path**: `string`

Denotes where the artifact will be downloaded to. If not specified then the artifact is downloaded to GITHUB_WORKSPACE

#### Defined in

[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L98)

@ -0,0 +1,23 @@

[@actions/artifact](../README.md) / DownloadArtifactResponse

# Interface: DownloadArtifactResponse

Response from the server when downloading an artifact

## Table of contents

### Properties

- [downloadPath](DownloadArtifactResponse.md#downloadpath)

## Properties

### downloadPath

• `Optional` **downloadPath**: `string`

The path where the artifact was downloaded to

#### Defined in

[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L88)

@ -0,0 +1,30 @@

[@actions/artifact](../README.md) / FindOptions

# Interface: FindOptions

## Table of contents

### Properties

- [findBy](FindOptions.md#findby)

## Properties

### findBy

• `Optional` **findBy**: `Object`

The criteria for finding Artifact(s) out of the scope of the current run.

#### Type declaration

| Name | Type | Description |
| :------ | :------ | :------ |
| `repositoryName` | `string` | Repository name (eg. 'toolkit') |
| `repositoryOwner` | `string` | Repository owner (eg. 'actions') |
| `token` | `string` | Token with actions:read permissions |
| `workflowRunId` | `number` | Workflow run ID of the artifact(s) to look up |

#### Defined in

[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L131)
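
A sketch of a cross-run lookup using these options; the owner, repo, and run id values are placeholders:

```typescript
import artifactClient from '@actions/artifact'

async function findInOtherRun(): Promise<void> {
  const {artifact} = await artifactClient.getArtifact('my-artifact', {
    findBy: {
      token: process.env.GITHUB_TOKEN as string, // needs actions:read
      workflowRunId: 123456789,
      repositoryOwner: 'actions',
      repositoryName: 'toolkit'
    }
  })
  console.log(`Found artifact ${artifact.id}`)
}
```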

@ -0,0 +1,23 @@

[@actions/artifact](../README.md) / GetArtifactResponse

# Interface: GetArtifactResponse

Response from the server when getting an artifact

## Table of contents

### Properties

- [artifact](GetArtifactResponse.md#artifact)

## Properties

### artifact

• **artifact**: [`Artifact`](Artifact.md)

Metadata about the artifact that was found

#### Defined in

[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L57)

@ -0,0 +1,24 @@

[@actions/artifact](../README.md) / ListArtifactsOptions

# Interface: ListArtifactsOptions

Options for listing artifacts

## Table of contents

### Properties

- [latest](ListArtifactsOptions.md#latest)

## Properties

### latest

• `Optional` **latest**: `boolean`

Filter the workflow run's artifacts to the latest by name
In the case of reruns, this can be useful to avoid duplicates

#### Defined in

[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L68)
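
A sketch of de-duplicating artifacts across reruns with this option:

```typescript
import artifactClient from '@actions/artifact'

async function listLatestPerName(): Promise<void> {
  // With `latest: true`, only the newest artifact for each name is returned
  const {artifacts} = await artifactClient.listArtifacts({latest: true})
  console.log(`${artifacts.length} unique artifact name(s)`)
}
```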

@ -0,0 +1,23 @@

[@actions/artifact](../README.md) / ListArtifactsResponse

# Interface: ListArtifactsResponse

Response from the server when listing artifacts

## Table of contents

### Properties

- [artifacts](ListArtifactsResponse.md#artifacts)

## Properties

### artifacts

• **artifacts**: [`Artifact`](Artifact.md)[]

A list of artifacts that were found

#### Defined in

[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L78)

@ -0,0 +1,55 @@

[@actions/artifact](../README.md) / UploadArtifactOptions

# Interface: UploadArtifactOptions

Options for uploading an artifact

## Table of contents

### Properties

- [compressionLevel](UploadArtifactOptions.md#compressionlevel)
- [retentionDays](UploadArtifactOptions.md#retentiondays)

## Properties

### compressionLevel

• `Optional` **compressionLevel**: `number`

The level of compression for Zlib to be applied to the artifact archive.
The value can range from 0 to 9:
- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression
Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.

#### Defined in

[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L47)

___

### retentionDays

• `Optional` **retentionDays**: `number`

Duration, in days, after which the artifact will expire.

By default an artifact expires after 90 days:
https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete

Use this option to override the default expiry.

Min value: 1
Max value: 90 unless changed by repository setting

If this is set to a greater value than the retention settings allow, the retention on the artifact
will be reduced to match the maximum value allowed on the server, and the upload process will continue. An
input of 0 assumes the default retention setting.

#### Defined in

[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L36)
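
A sketch combining both options on an upload (file paths are hypothetical):

```typescript
import artifactClient from '@actions/artifact'

async function uploadLogs(): Promise<void> {
  await artifactClient.uploadArtifact('logs', ['out/run.log'], 'out', {
    compressionLevel: 0, // skip compression for large, poorly compressible files
    retentionDays: 7     // expire earlier than the 90-day default
  })
}
```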

@ -0,0 +1,37 @@

[@actions/artifact](../README.md) / UploadArtifactResponse

# Interface: UploadArtifactResponse

Response from the server when an artifact is uploaded

## Table of contents

### Properties

- [id](UploadArtifactResponse.md#id)
- [size](UploadArtifactResponse.md#size)

## Properties

### id

• `Optional` **id**: `number`

The id of the artifact that was created. Not provided if no artifact was uploaded
This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts

#### Defined in

[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L14)

___

### size

• `Optional` **size**: `number`

Total size of the artifact in bytes. Not provided if no artifact was uploaded

#### Defined in

[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L8)
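
Since both fields are optional, a consuming sketch should guard before use:

```typescript
import artifactClient from '@actions/artifact'

async function uploadAndReport(): Promise<void> {
  const result = await artifactClient.uploadArtifact('my-artifact', ['out/report.txt'], 'out')
  if (result.id !== undefined && result.size !== undefined) {
    console.log(`Created artifact ${result.id} (${result.size} bytes)`)
  }
}
```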

File diff suppressed because it is too large

@ -1,6 +1,6 @@
{
  "name": "@actions/artifact",
  "version": "2.0.0",
  "version": "2.1.7",
  "preview": true,
  "description": "Actions artifact lib",
  "keywords": [

@ -30,10 +30,11 @@
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "test": "cd ../../ && npm run test ./packages/artifact",
    "bootstrap": "cd ../../ && npm run bootstrap",
    "tsc-run": "tsc",
    "tsc": "npm run bootstrap && npm run tsc-run"
    "tsc": "npm run bootstrap && npm run tsc-run",
    "gen:docs": "typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"

@ -48,15 +49,17 @@
    "@octokit/plugin-retry": "^3.0.9",
    "@octokit/request-error": "^5.0.0",
    "@protobuf-ts/plugin": "^2.2.3-alpha.1",
    "@types/unzipper": "^0.10.6",
    "archiver": "^5.3.1",
    "archiver": "^7.0.1",
    "crypto": "^1.0.1",
    "jwt-decode": "^3.1.2",
    "twirp-ts": "^2.5.0",
    "unzipper": "^0.10.14"
    "unzip-stream": "^0.3.1"
  },
  "devDependencies": {
    "@types/archiver": "^5.3.2",
    "@types/unzip-stream": "^0.3.4",
    "typedoc": "^0.25.4",
    "typedoc-plugin-markdown": "^3.17.1",
    "typescript": "^5.2.2"
  }
}
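
The new `gen:docs` script is presumably what produced the generated markdown files above; run from the package directory:

```console
$ npm run gen:docs
```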

@ -1,11 +1,8 @@
import {ArtifactClient, Client} from './internal/client'
import {ArtifactClient, DefaultArtifactClient} from './internal/client'

/**
 * Exported functionality that we want to expose for any users of @actions/artifact
 */
export * from './internal/shared/interfaces'
export {ArtifactClient}
export * from './internal/shared/errors'
export * from './internal/client'

export function create(): ArtifactClient {
  return Client.create()
}
const client: ArtifactClient = new DefaultArtifactClient()
export default client
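
For consumers, this entry-point change means switching from the removed `create()` factory to the default export (or constructing the client directly); a migration sketch:

```typescript
// Before (v1.x):
//   import {create} from '@actions/artifact'
//   const client = create()

// After (v2.x):
import artifactClient, {DefaultArtifactClient} from '@actions/artifact'

const manual = new DefaultArtifactClient() // equivalent to the default export

async function main(): Promise<void> {
  const {artifacts} = await artifactClient.listArtifacts()
  console.log(`${artifacts.length} artifact(s) in this run`)
}
```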
|
||||
|
|
|
@ -12,6 +12,7 @@ import type { PartialMessage } from "@protobuf-ts/runtime";
|
|||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { Int64Value } from "../../../google/protobuf/wrappers";
|
||||
import { StringValue } from "../../../google/protobuf/wrappers";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
|
@ -90,6 +91,141 @@ export interface FinalizeArtifactResponse {
|
|||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export interface ListArtifactsRequest {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Name of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
|
||||
*/
|
||||
nameFilter?: StringValue; // optional
|
||||
/**
|
||||
* Monolith Database ID of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
|
||||
*/
|
||||
idFilter?: Int64Value; // optional
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
export interface ListArtifactsResponse {
|
||||
/**
|
||||
* @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
|
||||
*/
|
||||
artifacts: ListArtifactsResponse_MonolithArtifact[];
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
export interface ListArtifactsResponse_MonolithArtifact {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Monolith database ID of the artifact
|
||||
*
|
||||
* @generated from protobuf field: int64 database_id = 3;
|
||||
*/
|
||||
databaseId: string;
|
||||
/**
|
||||
* Name of the artifact
|
||||
*
|
||||
* @generated from protobuf field: string name = 4;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* Size of the artifact in bytes
|
||||
*
|
||||
* @generated from protobuf field: int64 size = 5;
|
||||
*/
|
||||
size: string;
|
||||
/**
|
||||
* When the artifact was created in the monolith
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
|
||||
*/
|
||||
createdAt?: Timestamp;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
export interface GetSignedArtifactURLRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
export interface GetSignedArtifactURLResponse {
|
||||
/**
|
||||
* @generated from protobuf field: string signed_url = 1;
|
||||
*/
|
||||
signedUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
export interface DeleteArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
export interface DeleteArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
|
||||
constructor() {
|
||||
|
@ -348,10 +484,433 @@ class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse
|
|||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
export const FinalizeArtifactResponse = new FinalizeArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name_filter", kind: "message", T: () => StringValue },
|
||||
{ no: 4, name: "id_filter", kind: "message", T: () => Int64Value }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ListArtifactsRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.StringValue name_filter */ 3:
|
||||
message.nameFilter = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter);
|
||||
break;
|
||||
case /* google.protobuf.Int64Value id_filter */ 4:
|
||||
message.idFilter = Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* google.protobuf.StringValue name_filter = 3; */
|
||||
if (message.nameFilter)
|
||||
StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.Int64Value id_filter = 4; */
|
||||
if (message.idFilter)
|
||||
Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export const ListArtifactsRequest = new ListArtifactsRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse", [
|
||||
{ no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse {
|
||||
const message = { artifacts: [] };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ListArtifactsResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ 1:
|
||||
message.artifacts.push(ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1; */
|
||||
for (let i = 0; i < message.artifacts.length; i++)
|
||||
ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
export const ListArtifactsResponse = new ListArtifactsResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsResponse_MonolithArtifact>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* int64 database_id */ 3:
                    message.databaseId = reader.int64().toString();
                    break;
                case /* string name */ 4:
                    message.name = reader.string();
                    break;
                case /* int64 size */ 5:
                    message.size = reader.int64().toString();
                    break;
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* int64 database_id = 3; */
        if (message.databaseId !== "0")
            writer.tag(3, WireType.Varint).int64(message.databaseId);
        /* string name = 4; */
        if (message.name !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.name);
        /* int64 size = 5; */
        if (message.size !== "0")
            writer.tag(5, WireType.Varint).int64(message.size);
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export const ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type();
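Each generated `$Type` class above is instantiated once as a singleton `MessageType`; protobuf-ts drives `internalBinaryRead`/`internalBinaryWrite` through the public `create`/`toBinary`/`fromBinary` helpers. A minimal round-trip sketch (the import path is an assumption about where this generated module lives):

```ts
// Illustrative sketch only: round-trips a MonolithArtifact through the binary
// wire format using the @protobuf-ts/runtime MessageType helpers shown above.
import {ListArtifactsResponse_MonolithArtifact} from './generated' // assumed path

const original = ListArtifactsResponse_MonolithArtifact.create({
  workflowRunBackendId: 'run-backend-id', // placeholder values
  workflowJobRunBackendId: 'job-backend-id',
  databaseId: '42', // int64 fields surface as strings, matching create() defaults
  name: 'my-artifact',
  size: '1024'
})

// toBinary/fromBinary delegate to internalBinaryWrite/internalBinaryRead
const bytes = ListArtifactsResponse_MonolithArtifact.toBinary(original)
const decoded = ListArtifactsResponse_MonolithArtifact.fromBinary(bytes)
console.log(decoded.name, decoded.databaseId) // 'my-artifact', '42'
```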
// @generated message type with reflection information, may provide speed optimized methods
class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
    constructor() {
        super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetSignedArtifactURLRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export const GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
    constructor() {
        super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [
            { no: 1, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse {
        const message = { signedUrl: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetSignedArtifactURLResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string signed_url */ 1:
                    message.signedUrl = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string signed_url = 1; */
        if (message.signedUrl !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.signedUrl);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export const GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
    constructor() {
        super("github.actions.results.api.v1.DeleteArtifactRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<DeleteArtifactRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export const DeleteArtifactRequest = new DeleteArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
    constructor() {
        super("github.actions.results.api.v1.DeleteArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse {
        const message = { ok: false, artifactId: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<DeleteArtifactResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 artifact_id */ 2:
                    message.artifactId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, WireType.Varint).bool(message.ok);
        /* int64 artifact_id = 2; */
        if (message.artifactId !== "0")
            writer.tag(2, WireType.Varint).int64(message.artifactId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export const DeleteArtifactResponse = new DeleteArtifactResponse$Type();
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
export const ArtifactService = new ServiceType("github.actions.results.api.v1.ArtifactService", [
    { name: "CreateArtifact", options: {}, I: CreateArtifactRequest, O: CreateArtifactResponse },
    { name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
    { name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
    { name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse },
    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse }
]);
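The `ServiceType` above is what the generated twirp clients are built from: the JSON client used later in this diff exposes one method per RPC listed here. A minimal sketch, assuming the `internalArtifactTwirpClient` factory and `getBackendIdsFromToken` helper shown further down (the import paths are assumptions):

```ts
// Illustrative sketch only: calling one of the RPCs declared by the
// ArtifactService ServiceType via the generated JSON twirp client.
import {internalArtifactTwirpClient} from './internal/shared/artifact-twirp-client'
import {getBackendIdsFromToken} from './internal/shared/util'

async function listCurrentRunArtifacts(): Promise<void> {
  const client = internalArtifactTwirpClient() // wires up auth + retries
  const {workflowRunBackendId, workflowJobRunBackendId} = getBackendIdsFromToken()

  // Maps to the { name: "ListArtifacts", I: ListArtifactsRequest, O: ListArtifactsResponse } entry above
  const res = await client.ListArtifacts({
    workflowRunBackendId,
    workflowJobRunBackendId
  })
  console.log(`found ${res.artifacts.length} artifact(s)`)
}
```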
File diff suppressed because it is too large

@@ -1,122 +1,126 @@
import {warning} from '@actions/core'
import {isGhes} from './shared/config'
import {
  UploadArtifactOptions,
  UploadArtifactResponse,
  DownloadArtifactOptions,
  GetArtifactResponse,
  ListArtifactsOptions,
  ListArtifactsResponse,
  DownloadArtifactResponse,
  FindOptions,
  DeleteArtifactResponse
} from './shared/interfaces'
import {uploadArtifact} from './upload/upload-artifact'
import {
  downloadArtifactPublic,
  downloadArtifactInternal
} from './download/download-artifact'
import {
  deleteArtifactPublic,
  deleteArtifactInternal
} from './delete/delete-artifact'
import {getArtifactPublic, getArtifactInternal} from './find/get-artifact'
import {listArtifactsPublic, listArtifactsInternal} from './find/list-artifacts'
import {GHESNotSupportedError} from './shared/errors'

/**
 * Generic interface for the artifact client.
 */
export interface ArtifactClient {
  /**
   * Uploads an artifact.
   *
   * @param name The name of the artifact, required
   * @param files A list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options Extra options for customizing the upload behavior
   * @returns single UploadArtifactResponse object
   */
  uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadArtifactOptions
  ): Promise<UploadArtifactResponse>

  /**
   * Lists all artifacts that are part of the current workflow run.
   * This function will return at most 1000 artifacts per workflow run.
   *
   * If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
   * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
   *
   * @param options Extra options that allow for the customization of the list behavior
   * @returns ListArtifactResponse object
   */
  listArtifacts(
    options?: ListArtifactsOptions & FindOptions
  ): Promise<ListArtifactsResponse>

  /**
   * Finds an artifact by name.
   * If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
   * If the artifact is not found, it will throw.
   *
   * If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
   * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
   * `@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
   * It is possible to have multiple artifacts with the same name in the same workflow run by using old versions of upload-artifact (v1,v2 and v3), @actions/artifact < v2 or it is a rerun.
   * If there are multiple artifacts with the same name in the same workflow run this function will return the first artifact that matches the name.
   *
   * @param artifactName The name of the artifact to find
   * @param options Extra options that allow for the customization of the get behavior
   */
  getArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<GetArtifactResponse>

  /**
   * Downloads an artifact and unzips the content.
   *
   * If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
   *
   * @param artifactId The id of the artifact to download
   * @param options Extra options that allow for the customization of the download behavior
   * @returns single DownloadArtifactResponse object
   */
  downloadArtifact(
    artifactId: number,
    options?: DownloadArtifactOptions & FindOptions
  ): Promise<DownloadArtifactResponse>

  /**
   * Delete an Artifact
   *
   * If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
   *
   * @param artifactName The name of the artifact to delete
   * @param options Extra options that allow for the customization of the delete behavior
   * @returns single DeleteArtifactResponse object
   */
  deleteArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<DeleteArtifactResponse>
}
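Taken together, the interface above is exercised like the following minimal usage sketch (the artifact name, file paths and option values are assumptions, not part of the diff):

```ts
// Illustrative sketch only: basic upload -> find -> download flow with the
// default client implementation defined below.
import {DefaultArtifactClient} from '@actions/artifact'

async function example(): Promise<void> {
  const artifact = new DefaultArtifactClient()

  // Upload two files rooted at the current directory
  const {id, size} = await artifact.uploadArtifact(
    'my-artifact',
    ['artifact-path/first.txt', 'artifact-path/second.txt'],
    '.',
    {retentionDays: 7}
  )
  console.log(`uploaded artifact ${id} (${size} bytes)`)

  // Look it up again by name, then download it into the workspace
  const {artifact: found} = await artifact.getArtifact('my-artifact')
  const {downloadPath} = await artifact.downloadArtifact(found.id)
  console.log(`downloaded to ${downloadPath}`)
}
```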

/**
 * The default artifact client that is used by the artifact action(s).
 */
export class DefaultArtifactClient implements ArtifactClient {
  async uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadArtifactOptions
  ): Promise<UploadArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      return uploadArtifact(name, files, rootDirectory, options)
    } catch (error) {
      warning(

@@ -126,79 +130,72 @@ Errors can be temporary, so please try again and optionally run the action with
If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async downloadArtifact(
    artifactId: number,
    options?: DownloadArtifactOptions & FindOptions
  ): Promise<DownloadArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {repositoryOwner, repositoryName, token},
          ...downloadOptions
        } = options

        return downloadArtifactPublic(
          artifactId,
          repositoryOwner,
          repositoryName,
          token,
          downloadOptions
        )
      }

      return downloadArtifactInternal(artifactId, options)
    } catch (error) {
      warning(
        `Download Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async listArtifacts(
    options?: ListArtifactsOptions & FindOptions
  ): Promise<ListArtifactsResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {workflowRunId, repositoryOwner, repositoryName, token}
        } = options

        return listArtifactsPublic(
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token,
          options?.latest
        )
      }

      return listArtifactsInternal(options?.latest)
    } catch (error: unknown) {
      warning(
        `Listing Artifacts failed with error: ${error}.

@@ -208,50 +205,80 @@ Errors can be temporary, so please try again and optionally run the action with
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async getArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<GetArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {workflowRunId, repositoryOwner, repositoryName, token}
        } = options

        return getArtifactPublic(
          artifactName,
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token
        )
      }

      return getArtifactInternal(artifactName)
    } catch (error: unknown) {
      warning(
        `Get Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async deleteArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<DeleteArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {repositoryOwner, repositoryName, workflowRunId, token}
        } = options

        return deleteArtifactPublic(
          artifactName,
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token
        )
      }

      return deleteArtifactInternal(artifactName)
    } catch (error) {
      warning(
        `Delete Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }
}
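Every method above branches on `options.findBy`: without it the call goes to the internal twirp backend for the current run, with it the call routes to the public REST-based `*Public` variants, which can reach artifacts of other runs. A sketch of that pattern (every id, name and token below is a placeholder):

```ts
// Illustrative sketch only: operating on an artifact from a *different*
// workflow run by supplying FindOptions.findBy.
import {DefaultArtifactClient} from '@actions/artifact'

async function crossRunExample(): Promise<void> {
  const artifact = new DefaultArtifactClient()

  const findBy = {
    token: process.env.GITHUB_TOKEN as string, // needs actions:read on the target repo
    workflowRunId: 123456789, // placeholder run id
    repositoryOwner: 'actions',
    repositoryName: 'toolkit'
  }

  const {artifacts} = await artifact.listArtifacts({latest: true, findBy})
  if (artifacts.length > 0) {
    await artifact.downloadArtifact(artifacts[0].id, {path: './restored', findBy})
  }
}
```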

@@ -0,0 +1,109 @@
import {info, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {DeleteArtifactResponse} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from '../find/retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {
  DeleteArtifactRequest,
  ListArtifactsRequest,
  StringValue
} from '../../generated'
import {getArtifactPublic} from '../find/get-artifact'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function deleteArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string
): Promise<DeleteArtifactResponse> {
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  const getArtifactResp = await getArtifactPublic(
    artifactName,
    workflowRunId,
    repositoryOwner,
    repositoryName,
    token
  )

  const deleteArtifactResp = await github.rest.actions.deleteArtifact({
    owner: repositoryOwner,
    repo: repositoryName,
    artifact_id: getArtifactResp.artifact.id
  })

  if (deleteArtifactResp.status !== 204) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${deleteArtifactResp.status} (${deleteArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  return {
    id: getArtifactResp.artifact.id
  }
}

export async function deleteArtifactInternal(
  artifactName: string
): Promise<DeleteArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const listRes = await artifactClient.ListArtifacts(listReq)

  if (listRes.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}`
    )
  }

  let artifact = listRes.artifacts[0]
  if (listRes.artifacts.length > 1) {
    artifact = listRes.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  const req: DeleteArtifactRequest = {
    workflowRunBackendId: artifact.workflowRunBackendId,
    workflowJobRunBackendId: artifact.workflowJobRunBackendId,
    name: artifact.name
  }

  const res = await artifactClient.DeleteArtifact(req)
  info(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`)

  return {
    id: Number(res.artifactId)
  }
}

@@ -2,13 +2,21 @@ import fs from 'fs/promises'
import * as github from '@actions/github'
import * as core from '@actions/core'
import * as httpClient from '@actions/http-client'
import unzip from 'unzip-stream'
import {
  DownloadArtifactOptions,
  DownloadArtifactResponse
} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getGitHubWorkspaceDir} from '../shared/config'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {
  GetSignedArtifactURLRequest,
  Int64Value,
  ListArtifactsRequest
} from '../../generated'
import {getBackendIdsFromToken} from '../shared/util'
import {ArtifactNotFoundError} from '../shared/errors'

const scrubQueryParameters = (url: string): string => {
  const parsed = new URL(url)

@@ -30,35 +38,76 @@ async function exists(path: string): Promise<boolean> {
}

async function streamExtract(url: string, directory: string): Promise<void> {
  let retryCount = 0
  while (retryCount < 5) {
    try {
      await streamExtractExternal(url, directory)
      return
    } catch (error) {
      retryCount++
      core.debug(
        `Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`
      )
      // wait 5 seconds before retrying
      await new Promise(resolve => setTimeout(resolve, 5000))
    }
  }

  throw new Error(`Artifact download failed after ${retryCount} retries.`)
}

export async function streamExtractExternal(
  url: string,
  directory: string
): Promise<void> {
  const client = new httpClient.HttpClient(getUserAgentString())
  const response = await client.get(url)

  if (response.message.statusCode !== 200) {
    throw new Error(
      `Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`
    )
  }

  const timeout = 30 * 1000 // 30 seconds

  return new Promise((resolve, reject) => {
    const timerFn = (): void => {
      response.message.destroy(
        new Error(`Blob storage chunk did not respond in ${timeout}ms`)
      )
    }
    const timer = setTimeout(timerFn, timeout)

    response.message
      .on('data', () => {
        timer.refresh()
      })
      .on('error', (error: Error) => {
        core.debug(
          `response.message: Artifact download failed: ${error.message}`
        )
        clearTimeout(timer)
        reject(error)
      })
      .pipe(unzip.Extract({path: directory}))
      .on('close', () => {
        clearTimeout(timer)
        resolve()
      })
      .on('error', (error: Error) => {
        reject(error)
      })
  })
}

export async function downloadArtifactPublic(
  artifactId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string,
  options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
  const downloadPath = await resolveOrCreateDirectory(options?.path)

  const api = github.getOctokit(token)


@@ -97,5 +146,72 @@ export async function downloadArtifact(
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath}
}

export async function downloadArtifactInternal(
  artifactId: number,
  options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
  const downloadPath = await resolveOrCreateDirectory(options?.path)

  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    idFilter: Int64Value.create({value: artifactId.toString()})
  }

  const {artifacts} = await artifactClient.ListArtifacts(listReq)

  if (artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`
    )
  }

  if (artifacts.length > 1) {
    core.warning('Multiple artifacts found, defaulting to first.')
  }

  const signedReq: GetSignedArtifactURLRequest = {
    workflowRunBackendId: artifacts[0].workflowRunBackendId,
    workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
    name: artifacts[0].name
  }

  const {signedUrl} = await artifactClient.GetSignedArtifactURL(signedReq)

  core.info(
    `Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`
  )

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    await streamExtract(signedUrl, downloadPath)
    core.info(`Artifact download completed successfully.`)
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath}
}

async function resolveOrCreateDirectory(
  downloadPath = getGitHubWorkspaceDir()
): Promise<string> {
  if (!(await exists(downloadPath))) {
    core.debug(
      `Artifact destination folder does not exist, creating: ${downloadPath}`
    )
    await fs.mkdir(downloadPath, {recursive: true})
  } else {
    core.debug(`Artifact destination folder already exists: ${downloadPath}`)
  }

  return downloadPath
}
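The interesting piece of `streamExtractExternal` above is the idle-timeout watchdog: the timer fires only when the stream stalls for 30 seconds, not merely because the download is slow, since every `'data'` event refreshes it. A minimal generalized sketch of that pattern (names here are illustrative, not from the diff):

```ts
// Illustrative sketch only: the timer.refresh() watchdog pattern, reduced to
// its core for any Node readable stream.
import {Readable} from 'stream'

function withIdleTimeout(stream: Readable, ms: number): Readable {
  const timer = setTimeout(() => {
    // fires only if no 'data' arrived within the last `ms` milliseconds
    stream.destroy(new Error(`stream idle for ${ms}ms`))
  }, ms)

  stream
    .on('data', () => timer.refresh()) // any progress resets the countdown
    .on('close', () => clearTimeout(timer))
    .on('error', () => clearTimeout(timer))

  return stream
}
```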

@@ -1,14 +1,18 @@
import {getOctokit} from '@actions/github'
import {retry} from '@octokit/plugin-retry'
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {getRetryOptions} from './retry-options'
import {requestLog} from '@octokit/plugin-request-log'
import {GetArtifactResponse} from '../shared/interfaces'
import {getBackendIdsFromToken} from '../shared/util'
import {getUserAgentString} from '../shared/user-agent'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {ListArtifactsRequest, StringValue, Timestamp} from '../../generated'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function getArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,

@@ -38,32 +42,80 @@ export async function getArtifact(
  )

  if (getArtifactResp.status !== 200) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${getArtifactResp.status} (${getArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  if (getArtifactResp.data.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}
        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
    )
  }

  let artifact = getArtifactResp.data.artifacts[0]
  if (getArtifactResp.data.artifacts.length > 1) {
    artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]
    core.debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.id})`
    )
  }

  return {
    artifact: {
      name: artifact.name,
      id: artifact.id,
      size: artifact.size_in_bytes,
      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
    }
  }
}

export async function getArtifactInternal(
  artifactName: string
): Promise<GetArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const req: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const res = await artifactClient.ListArtifacts(req)

  if (res.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}
        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
    )
  }

  let artifact = res.artifacts[0]
  if (res.artifacts.length > 1) {
    artifact = res.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    core.debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  return {
    artifact: {
      name: artifact.name,
      id: Number(artifact.databaseId),
      size: Number(artifact.size),
      createdAt: artifact.createdAt
        ? Timestamp.toDate(artifact.createdAt)
        : undefined
    }
  }
}
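Both variants above resolve duplicate names the same way: sort descending by numeric id (or `databaseId`) and take the first element, so the newest upload wins. A tiny illustration with hypothetical data:

```ts
// Illustrative sketch only: how duplicate-name resolution behaves.
const artifacts = [
  {name: 'dist', id: 101},
  {name: 'dist', id: 307}, // re-run of the same job, uploaded last
  {name: 'dist', id: 205}
]

const newest = artifacts.sort((a, b) => b.id - a.id)[0]
console.log(newest.id) // 307
```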

@@ -7,23 +7,27 @@ import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {ListArtifactsRequest, Timestamp} from '../../generated'

// Limiting to 1000 for perf reasons
const maximumArtifactCount = 1000
const paginationCount = 100
const maxNumberOfPages = maximumArtifactCount / paginationCount

export async function listArtifactsPublic(
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string,
  latest = false
): Promise<ListArtifactsResponse> {
  info(
    `Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`
  )

  let artifacts: Artifact[] = []
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {

@@ -62,8 +66,8 @@ export async function listArtifacts(
    artifacts.push({
      name: artifact.name,
      id: artifact.id,
      size: artifact.size_in_bytes,
      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
    })
  }

@@ -89,15 +93,73 @@ export async function listArtifacts(
      artifacts.push({
        name: artifact.name,
        id: artifact.id,
        size: artifact.size_in_bytes,
        createdAt: artifact.created_at
          ? new Date(artifact.created_at)
          : undefined
      })
    }
  }

  if (latest) {
    artifacts = filterLatest(artifacts)
  }

  info(`Found ${artifacts.length} artifact(s)`)

  return {
    artifacts
  }
}

export async function listArtifactsInternal(
  latest = false
): Promise<ListArtifactsResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const req: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId
  }

  const res = await artifactClient.ListArtifacts(req)
  let artifacts: Artifact[] = res.artifacts.map(artifact => ({
    name: artifact.name,
    id: Number(artifact.databaseId),
    size: Number(artifact.size),
    createdAt: artifact.createdAt
      ? Timestamp.toDate(artifact.createdAt)
      : undefined
  }))

  if (latest) {
    artifacts = filterLatest(artifacts)
  }

  info(`Found ${artifacts.length} artifact(s)`)

  return {
    artifacts
  }
}

/**
 * Filters a list of artifacts to only include the latest artifact for each name
 * @param artifacts The artifacts to filter
 * @returns The filtered list of artifacts
 */
function filterLatest(artifacts: Artifact[]): Artifact[] {
  artifacts.sort((a, b) => b.id - a.id)
  const latestArtifacts: Artifact[] = []
  const seenArtifactNames = new Set<string>()
  for (const artifact of artifacts) {
    if (!seenArtifactNames.has(artifact.name)) {
      latestArtifacts.push(artifact)
      seenArtifactNames.add(artifact.name)
    }
  }
  return latestArtifacts
}
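A quick illustration of `filterLatest` semantics with hypothetical data: after the descending sort by id, the `Set` of seen names keeps only the first, i.e. newest, artifact per name, leaving other names untouched.

```ts
// Hypothetical input: 'logs' was uploaded twice across reruns.
const input = [
  {name: 'logs', id: 4, size: 10},
  {name: 'coverage', id: 7, size: 99},
  {name: 'logs', id: 9, size: 12}
]

// filterLatest(input) sorts ids to [9, 7, 4], then de-dupes by name:
// => [{name: 'logs', id: 9, size: 12}, {name: 'coverage', id: 7, size: 99}]
```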

@@ -3,6 +3,8 @@ import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {info, debug} from '@actions/core'
import {ArtifactServiceClientJSON} from '../../generated'
import {getResultsServiceUrl, getRuntimeToken} from './config'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'

// The twirp http client must implement this interface
interface Rpc {

@@ -52,17 +54,17 @@ class ArtifactHttpClient implements Rpc {
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
    debug(`[Request] ${method} ${url}`)
    const headers = {
      'Content-Type': contentType
    }
    try {
      const {body} = await this.retryableRequest(async () =>
        this.httpClient.post(url, JSON.stringify(data), headers)
      )

      return body
    } catch (error) {
      throw new Error(`Failed to ${method}: ${error.message}`)
    }

@@ -70,23 +72,46 @@ class ArtifactHttpClient implements Rpc {

  async retryableRequest(
    operation: () => Promise<HttpClientResponse>
  ): Promise<{response: HttpClientResponse; body: object}> {
    let attempt = 0
    let errorMessage = ''
    let rawBody = ''
    while (attempt < this.maxAttempts) {
      let isRetryable = false

      try {
        const response = await operation()
        const statusCode = response.message.statusCode

        rawBody = await response.readBody()
        debug(`[Response] - ${response.message.statusCode}`)
        debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
        const body = JSON.parse(rawBody)
        debug(`Body: ${JSON.stringify(body, null, 2)}`)
        if (this.isSuccessStatusCode(statusCode)) {
          return {response, body}
        }

        isRetryable = this.isRetryableHttpStatusCode(statusCode)
        errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
        if (body.msg) {
          if (UsageError.isUsageErrorMessage(body.msg)) {
            throw new UsageError()
          }

          errorMessage = `${errorMessage}: ${body.msg}`
        }
      } catch (error) {
        if (error instanceof SyntaxError) {
          debug(`Raw Body: ${rawBody}`)
        }

        if (error instanceof UsageError) {
          throw error
        }

        if (NetworkError.isNetworkErrorCode(error?.code)) {
          throw new NetworkError(error?.code)
        }

        isRetryable = true
        errorMessage = error.message
      }

@@ -128,8 +153,7 @@ class ArtifactHttpClient implements Rpc {
      HttpCodes.GatewayTimeout,
      HttpCodes.InternalServerError,
      HttpCodes.ServiceUnavailable,
      HttpCodes.TooManyRequests
    ]

    return retryableStatusCodes.includes(statusCode)

@@ -157,17 +181,16 @@ class ArtifactHttpClient implements Rpc {
  }
}

export function internalArtifactTwirpClient(options?: {
  maxAttempts?: number
  retryIntervalMs?: number
  retryMultiplier?: number
}): ArtifactServiceClientJSON {
  const client = new ArtifactHttpClient(
    getUserAgentString(),
    options?.maxAttempts,
    options?.retryIntervalMs,
    options?.retryMultiplier
  )
  return new ArtifactServiceClientJSON(client)
}
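All three options of `internalArtifactTwirpClient` fall back to the `ArtifactHttpClient` defaults when omitted. A minimal sketch of constructing the client with custom retry behavior (the option values and the backend ids are placeholders):

```ts
// Illustrative sketch only: tuning the internal twirp client's retry policy.
import {internalArtifactTwirpClient} from './internal/shared/artifact-twirp-client'

async function signedUrlExample(): Promise<void> {
  const client = internalArtifactTwirpClient({
    maxAttempts: 7,        // retry up to 7 times on retryable status codes
    retryIntervalMs: 3000, // base delay between attempts
    retryMultiplier: 1.5   // backoff growth factor
  })

  const res = await client.GetSignedArtifactURL({
    workflowRunBackendId: 'run-id', // placeholder values
    workflowJobRunBackendId: 'job-id',
    name: 'my-artifact'
  })
  console.log(res.signedUrl)
}
```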

@@ -1,3 +1,5 @@
import os from 'os'

// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is use during upload to blob storage
export function getUploadChunkSize(): number {

@@ -17,14 +19,21 @@ export function getResultsServiceUrl(): string {
  if (!resultsUrl) {
    throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable')
  }

  return new URL(resultsUrl).origin
}

export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost =
    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')

  return !isGitHubHost && !isGheHost
}

export function getGitHubWorkspaceDir(): string {

@@ -34,3 +43,17 @@ export function getGitHubWorkspaceDir(): string {
  }
  return ghWorkspaceDir
}

// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
export function getConcurrency(): number {
  const numCPUs = os.cpus().length

  if (numCPUs <= 4) {
    return 32
  }

  const concurrency = 16 * numCPUs
  return concurrency > 300 ? 300 : concurrency
}
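Concretely, per the azcopy rule above: 2 CPUs hit the floor and yield 32, 8 CPUs yield 16 × 8 = 128, and 32 CPUs yield 16 × 32 = 512, which is capped to 300. The revised `isGhes` also now treats GHE cloud hosts as GitHub-hosted. A small sketch exercising both (the import path is an assumption):

```ts
// Illustrative sketch only.
import assert from 'assert'
import {isGhes} from './internal/shared/config' // assumed path

// *.ghe.com and *.ghe.localhost count as GitHub-hosted, so not GHES:
process.env['GITHUB_SERVER_URL'] = 'https://myorg.ghe.com'
assert.strictEqual(isGhes(), false)

// A self-hosted GitHub Enterprise Server domain is GHES:
process.env['GITHUB_SERVER_URL'] = 'https://github.mycompany.com'
assert.strictEqual(isGhes(), true)
```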

@@ -0,0 +1,72 @@
export class FilesNotFoundError extends Error {
  files: string[]

  constructor(files: string[] = []) {
    let message = 'No files were found to upload'
    if (files.length > 0) {
      message += `: ${files.join(', ')}`
    }

    super(message)
    this.files = files
    this.name = 'FilesNotFoundError'
  }
}

export class InvalidResponseError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'InvalidResponseError'
  }
}

export class ArtifactNotFoundError extends Error {
  constructor(message = 'Artifact not found') {
    super(message)
    this.name = 'ArtifactNotFoundError'
  }
}

export class GHESNotSupportedError extends Error {
  constructor(
    message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'
  ) {
    super(message)
    this.name = 'GHESNotSupportedError'
  }
}

export class NetworkError extends Error {
  code: string

  constructor(code: string) {
    const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
    super(message)
    this.code = code
    this.name = 'NetworkError'
  }

  static isNetworkErrorCode = (code?: string): boolean => {
    if (!code) return false
    return [
      'ECONNRESET',
      'ENOTFOUND',
      'ETIMEDOUT',
      'ECONNREFUSED',
      'EHOSTUNREACH'
    ].includes(code)
  }
}

export class UsageError extends Error {
  constructor() {
    const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
    super(message)
    this.name = 'UsageError'
  }

  static isUsageErrorMessage = (msg?: string): boolean => {
    if (!msg) return false
    return msg.includes('insufficient usage')
  }
}
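Because the v2 client throws these typed errors instead of returning `{success: false}`, callers branch with `instanceof`. A minimal handling sketch (the wrapper function and import path are assumptions):

```ts
// Illustrative sketch only: classifying the typed errors exported above.
import {
  ArtifactNotFoundError,
  NetworkError,
  UsageError
} from './internal/shared/errors'

async function withErrorHandling(run: () => Promise<void>): Promise<void> {
  try {
    await run()
  } catch (err) {
    if (err instanceof UsageError) {
      // storage quota hit: retrying will not help until usage is recalculated
      throw err
    }
    if (err instanceof NetworkError) {
      console.log(`network failure (${err.code}); check self-hosted runner egress`)
      throw err
    }
    if (err instanceof ArtifactNotFoundError) {
      console.log('artifact missing or expired; treating as non-fatal')
      return
    }
    throw err
  }
}
```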

@@ -1,14 +1,7 @@
/**
 * Response from the server when an artifact is uploaded
 */
export interface UploadArtifactResponse {
  /**
   * Total size of the artifact in bytes. Not provided if no artifact was uploaded
   */

@@ -21,7 +14,10 @@ export interface UploadResponse {
  id?: number
}

/**
 * Options for uploading an artifact
 */
export interface UploadArtifactOptions {
  /**
   * Duration after which artifact will expire in days.
   *
@ -38,31 +34,43 @@ export interface UploadOptions {
|
|||
* input of 0 assumes default retention setting.
|
||||
*/
|
||||
retentionDays?: number
|
||||
/**
|
||||
* The level of compression for Zlib to be applied to the artifact archive.
|
||||
* The value can range from 0 to 9:
|
||||
* - 0: No compression
|
||||
* - 1: Best speed
|
||||
* - 6: Default compression (same as GNU Gzip)
|
||||
* - 9: Best compression
|
||||
* Higher levels will result in better compression, but will take longer to complete.
|
||||
* For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
|
||||
*/
|
||||
compressionLevel?: number
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
* *
|
||||
* GetArtifact *
|
||||
* *
|
||||
*****************************************************************************/
|
||||
|
||||
/**
|
||||
* Response from the server when getting an artifact
|
||||
*/
|
||||
export interface GetArtifactResponse {
|
||||
/**
|
||||
* If an artifact was found
|
||||
*/
|
||||
success: boolean
|
||||
|
||||
/**
|
||||
* Metadata about the artifact that was found
|
||||
*/
|
||||
artifact?: Artifact
|
||||
artifact: Artifact
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
* *
|
||||
* ListArtifact *
|
||||
* *
|
||||
*****************************************************************************/
|
||||
/**
|
||||
* Options for listing artifacts
|
||||
*/
|
||||
export interface ListArtifactsOptions {
|
||||
/**
|
||||
* Filter the workflow run's artifacts to the latest by name
|
||||
* In the case of reruns, this can be useful to avoid duplicates
|
||||
*/
|
||||
latest?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Response from the server when listing artifacts
|
||||
*/
|
||||
export interface ListArtifactsResponse {
|
||||
/**
|
||||
* A list of artifacts that were found
|
||||
|
@ -70,22 +78,19 @@ export interface ListArtifactsResponse {
|
|||
artifacts: Artifact[]
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
* *
|
||||
* DownloadArtifact *
|
||||
* *
|
||||
*****************************************************************************/
|
||||
/**
|
||||
* Response from the server when downloading an artifact
|
||||
*/
|
||||
export interface DownloadArtifactResponse {
|
||||
/**
|
||||
* If the artifact download was successful
|
||||
*/
|
||||
success: boolean
|
||||
/**
|
||||
* The path where the artifact was downloaded to
|
||||
*/
|
||||
downloadPath?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for downloading an artifact
|
||||
*/
|
||||
export interface DownloadArtifactOptions {
|
||||
/**
|
||||
* Denotes where the artifact will be downloaded to. If not specified then the artifact is download to GITHUB_WORKSPACE
|
||||
|
@ -93,11 +98,9 @@ export interface DownloadArtifactOptions {
|
|||
path?: string
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
* *
|
||||
* Shared *
|
||||
* *
|
||||
*****************************************************************************/
|
||||
/**
|
||||
* An Actions Artifact
|
||||
*/
|
||||
export interface Artifact {
|
||||
/**
|
||||
* The name of the artifact
|
||||
|
@ -109,13 +112,48 @@ export interface Artifact {
|
|||
*/
|
||||
id: number
|
||||
|
||||
/**
|
||||
* The URL of the artifact
|
||||
*/
|
||||
url: string
|
||||
|
||||
/**
|
||||
* The size of the artifact in bytes
|
||||
*/
|
||||
size: number
|
||||
|
||||
/**
|
||||
* The time when the artifact was created
|
||||
*/
|
||||
createdAt?: Date
|
||||
}
|
||||
|
||||
// FindOptions are for fetching Artifact(s) out of the scope of the current run.
|
||||
export interface FindOptions {
|
||||
/**
|
||||
* The criteria for finding Artifact(s) out of the scope of the current run.
|
||||
*/
|
||||
findBy?: {
|
||||
/**
|
||||
* Token with actions:read permissions
|
||||
*/
|
||||
token: string
|
||||
/**
|
||||
* WorkflowRun of the artifact(s) to lookup
|
||||
*/
|
||||
workflowRunId: number
|
||||
/**
|
||||
* Repository owner (eg. 'actions')
|
||||
*/
|
||||
repositoryOwner: string
|
||||
/**
|
||||
* Repository owner (eg. 'toolkit')
|
||||
*/
|
||||
repositoryName: string
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Response from the server when deleting an artifact
|
||||
*/
|
||||
export interface DeleteArtifactResponse {
|
||||
/**
|
||||
* The id of the artifact that was deleted
|
||||
*/
|
||||
id: number
|
||||
}
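Taken together, these interfaces define the shapes passed to and returned from the artifact client. A minimal sketch of exercising the upload options and `FindOptions` follows; the default-export client and the `getArtifact` call shape are assumptions based on the interfaces above, and the run id and file paths are placeholders:

```js
const { default: artifact } = require('@actions/artifact');

async function run() {
  // Upload with the optional knobs from UploadArtifactOptions
  const { id, size } = await artifact.uploadArtifact(
    'my-artifact',
    ['out/app.zip'],
    '.',
    { retentionDays: 7, compressionLevel: 0 } // 0: skip compression for already-compressed files
  );
  console.log(`uploaded artifact ${id} (${size} bytes)`);

  // Look up an artifact from another workflow run via FindOptions.findBy
  const { artifact: found } = await artifact.getArtifact('my-artifact', {
    findBy: {
      token: process.env.GITHUB_TOKEN, // needs actions:read
      workflowRunId: 1234, // hypothetical run id
      repositoryOwner: 'actions',
      repositoryName: 'toolkit'
    }
  });
  console.log(`found artifact ${found.id} created at ${found.createdAt}`);
}

run();
```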
@ -1,3 +1,4 @@
import * as core from '@actions/core'
import {getRuntimeToken} from './config'
import jwt_decode from 'jwt-decode'

@ -11,7 +12,7 @@ interface ActionsToken {
}

const InvalidJwtError = new Error(
  'Failed to get backend IDs: The provided JWT token is invalid'
  'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
)

// uses the JWT token claims to get the

@ -41,24 +42,29 @@ export function getBackendIdsFromToken(): BackendIds {

  for (const scopes of scpParts) {
    const scopeParts = scopes.split(':')
    if (scopeParts?.[0] !== 'Actions.Results') {
      // not the Actions.Results scope
      continue
    }

    /*
     * example scopeParts:
     * ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
     */
    if (scopeParts.length !== 3) {
      // not the Actions.Results scope
      continue
      // missing expected number of claims
      throw InvalidJwtError
    }

    if (scopeParts[0] !== 'Actions.Results') {
      // not the Actions.Results scope
      continue
    }

    return {
    const ids = {
      workflowRunBackendId: scopeParts[1],
      workflowJobRunBackendId: scopeParts[2]
    }

    core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
    core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)

    return ids
  }

  throw InvalidJwtError
@ -1,17 +1,13 @@
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/core-http'
import {ZipUploadStream} from './zip'
import {getUploadChunkSize} from '../shared/config'
import {getUploadChunkSize, getConcurrency} from '../shared/config'
import * as core from '@actions/core'
import * as crypto from 'crypto'
import * as stream from 'stream'
import {NetworkError} from '../shared/errors'

export interface BlobUploadResponse {
  /**
   * If the upload was successful or not
   */
  isSuccess: boolean

  /**
   * The total reported upload size in bytes. Empty if the upload failed
   */

@ -28,19 +24,40 @@ export async function uploadZipToBlobStorage(
  zipUploadStream: ZipUploadStream
): Promise<BlobUploadResponse> {
  let uploadByteCount = 0
  let lastProgressTime = Date.now()
  let timeoutId: NodeJS.Timeout | undefined

  const maxBuffers = 5
  const chunkTimer = (timeout: number): NodeJS.Timeout => {
    // clear the previous timeout
    if (timeoutId) {
      clearTimeout(timeoutId)
    }

    timeoutId = setTimeout(() => {
      const now = Date.now()
      // if there's been no progress event for longer than the timeout,
      // then we'll consider the upload stalled
      if (now - lastProgressTime > timeout) {
        throw new Error('Upload progress stalled.')
      }
    }, timeout)
    return timeoutId
  }
  const maxConcurrency = getConcurrency()
  const bufferSize = getUploadChunkSize()
  const blobClient = new BlobClient(authenticatedUploadURL)
  const blockBlobClient = blobClient.getBlockBlobClient()
  const timeoutDuration = 300000 // 5 minutes

  core.debug(
    `Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`
    `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
  )

  const uploadCallback = (progress: TransferProgressEvent): void => {
    core.info(`Uploaded bytes ${progress.loadedBytes}`)
    uploadByteCount = progress.loadedBytes
    chunkTimer(timeoutDuration)
    lastProgressTime = Date.now()
  }

  const options: BlockBlobUploadStreamOptions = {

@ -55,41 +72,41 @@ export async function uploadZipToBlobStorage(
  zipUploadStream.pipe(uploadStream) // This stream is used for the upload
  zipUploadStream.pipe(hashStream).setEncoding('hex') // This stream is used to compute a hash of the zip content for the integrity check

  try {
    core.info('Beginning upload of artifact content to blob storage')
  core.info('Beginning upload of artifact content to blob storage')

  try {
    // Start the chunk timer
    timeoutId = chunkTimer(timeoutDuration)
    await blockBlobClient.uploadStream(
      uploadStream,
      bufferSize,
      maxBuffers,
      maxConcurrency,
      options
    )

    core.info('Finished uploading artifact content to blob storage!')

    hashStream.end()
    sha256Hash = hashStream.read() as string
    core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`)
  } catch (error) {
    core.warning(
      `Failed to upload artifact zip to blob storage, error: ${error}`
    )
    return {
      isSuccess: false
    if (NetworkError.isNetworkErrorCode(error?.code)) {
      throw new NetworkError(error?.code)
    }
    throw error
  } finally {
    // clear the timeout whether or not the upload completes
    if (timeoutId) {
      clearTimeout(timeoutId)
    }
  }

  core.info('Finished uploading artifact content to blob storage!')

  hashStream.end()
  sha256Hash = hashStream.read() as string
  core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`)

  if (uploadByteCount === 0) {
    core.warning(
      `No data was uploaded to blob storage. Reported upload byte count is 0`
      `No data was uploaded to blob storage. Reported upload byte count is 0.`
    )
    return {
      isSuccess: false
    }
  }

  return {
    isSuccess: true,
    uploadSize: uploadByteCount,
    sha256Hash
  }
@ -1,8 +1,11 @@
import * as core from '@actions/core'
import {UploadOptions, UploadResponse} from '../shared/interfaces'
import {
  UploadArtifactOptions,
  UploadArtifactResponse
} from '../shared/interfaces'
import {getExpiration} from './retention'
import {validateArtifactName} from './path-and-artifact-name-validation'
import {createArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {
  UploadZipSpecification,
  getUploadZipSpecification,

@ -16,13 +19,14 @@ import {
  FinalizeArtifactRequest,
  StringValue
} from '../../generated'
import {FilesNotFoundError, InvalidResponseError} from '../shared/errors'

export async function uploadArtifact(
  name: string,
  files: string[],
  rootDirectory: string,
  options?: UploadOptions | undefined
): Promise<UploadResponse> {
  options?: UploadArtifactOptions | undefined
): Promise<UploadArtifactResponse> {
  validateArtifactName(name)
  validateRootDirectory(rootDirectory)

@ -31,31 +35,16 @@ export async function uploadArtifact(
    rootDirectory
  )
  if (zipSpecification.length === 0) {
    core.warning(`No files were found to upload`)
    return {
      success: false
    }
    throw new FilesNotFoundError(
      zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : []))
    )
  }

  const zipUploadStream = await createZipUploadStream(zipSpecification)

  // get the IDs needed for the artifact creation
  const backendIds = getBackendIdsFromToken()
  if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
    core.warning(
      `Failed to get the necessary backend ids which are required to create the artifact`
    )
    return {
      success: false
    }
  }
  core.debug(`Workflow Run Backend ID: ${backendIds.workflowRunBackendId}`)
  core.debug(
    `Workflow Job Run Backend ID: ${backendIds.workflowJobRunBackendId}`
  )

  // create the artifact client
  const artifactClient = createArtifactTwirpClient('upload')
  const artifactClient = internalArtifactTwirpClient()

  // create the artifact
  const createArtifactReq: CreateArtifactRequest = {

@ -74,22 +63,21 @@ export async function uploadArtifact(
  const createArtifactResp =
    await artifactClient.CreateArtifact(createArtifactReq)
  if (!createArtifactResp.ok) {
    core.warning(`Failed to create artifact`)
    return {
      success: false
    }
    throw new InvalidResponseError(
      'CreateArtifact: response from backend was not ok'
    )
  }

  const zipUploadStream = await createZipUploadStream(
    zipSpecification,
    options?.compressionLevel
  )

  // Upload zip to blob storage
  const uploadResult = await uploadZipToBlobStorage(
    createArtifactResp.signedUploadUrl,
    zipUploadStream
  )
  if (uploadResult.isSuccess === false) {
    return {
      success: false
    }
  }

  // finalize the artifact
  const finalizeArtifactReq: FinalizeArtifactRequest = {

@ -110,10 +98,9 @@ export async function uploadArtifact(
  const finalizeArtifactResp =
    await artifactClient.FinalizeArtifact(finalizeArtifactReq)
  if (!finalizeArtifactResp.ok) {
    core.warning(`Failed to finalize artifact`)
    return {
      success: false
    }
    throw new InvalidResponseError(
      'FinalizeArtifact: response from backend was not ok'
    )
  }

  const artifactId = BigInt(finalizeArtifactResp.artifactId)

@ -122,7 +109,6 @@ export async function uploadArtifact(
  )

  return {
    success: true,
    size: uploadResult.uploadSize,
    id: Number(artifactId)
  }
@ -5,6 +5,8 @@ import {createReadStream} from 'fs'
import {UploadZipSpecification} from './upload-zip-specification'
import {getUploadChunkSize} from '../shared/config'

export const DEFAULT_COMPRESSION_LEVEL = 6

// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
export class ZipUploadStream extends stream.Transform {

@ -21,14 +23,16 @@ export class ZipUploadStream extends stream.Transform {
}

export async function createZipUploadStream(
  uploadSpecification: UploadZipSpecification[]
  uploadSpecification: UploadZipSpecification[],
  compressionLevel: number = DEFAULT_COMPRESSION_LEVEL
): Promise<ZipUploadStream> {
  core.debug(
    `Creating Artifact archive with compressionLevel: ${compressionLevel}`
  )

  const zip = archiver.create('zip', {
    zlib: {level: 9} // Sets the compression level.
    // Available options are 0-9
    // 0 => no compression
    // 1 => fastest with low compression
    // 9 => highest compression ratio but the slowest
    highWaterMark: getUploadChunkSize(),
    zlib: {level: compressionLevel}
  })

  // register callbacks for various events during the zip lifecycle
@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright 2024 GitHub

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -0,0 +1,179 @@
# `@actions/attest`

Functions for generating signed attestations for workflow artifacts.

Attestations bind some subject (a named artifact along with its digest) to a
predicate (some assertion about that subject) using the [in-toto
statement](https://github.com/in-toto/attestation/tree/main/spec/v1) format. A
signature is generated for the attestation using a
[Sigstore](https://www.sigstore.dev/)-issued signing certificate.
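For illustration, a bound subject/predicate pair serializes to a statement of
this shape (all values below are placeholders):

```json
{
  "_type": "https://in-toto.io/Statement/v1",
  "subject": [
    {
      "name": "my-artifact-name",
      "digest": {"sha256": "36ab4667..."}
    }
  ],
  "predicateType": "https://in-toto.io/attestation/release",
  "predicate": {"key": "value"}
}
```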
Once the attestation has been created and signed, it will be uploaded to the GH
attestations API and associated with the repository from which the workflow was
initiated.

See [Using artifact attestations to establish provenance for builds](https://docs.github.com/en/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)
for more information on artifact attestations.

## Usage

### `attest`

The `attest` function takes the supplied subject/predicate pair and generates a
signed attestation.

```js
const { attest } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  // In order to persist attestations to the repo, this should be a token with
  // repository write permissions.
  const ghToken = core.getInput('gh-token');

  const attestation = await attest({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    predicateType: 'https://in-toto.io/attestation/release',
    predicate: { . . . },
    token: ghToken
  });

  console.log(attestation);
}

run();
```

The `attest` function supports the following options:

```typescript
export type AttestOptions = {
  // The name of the subject to be attested.
  subjectName: string
  // The digest of the subject to be attested. Should be a map of digest
  // algorithms to their hex-encoded values.
  subjectDigest: Record<string, string>
  // URI identifying the content type of the predicate being attested.
  predicateType: string
  // Predicate to be attested.
  predicate: object
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: 'public-good' | 'github'
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
}
```

### `attestProvenance`

The `attestProvenance` function accepts the name and digest of some artifact and
generates a build provenance attestation over those values.

The attestation is formed by first generating a [SLSA provenance
predicate](https://slsa.dev/spec/v1.0/provenance) populated with
[metadata](https://github.com/slsa-framework/github-actions-buildtypes/tree/main/workflow/v1)
pulled from the GitHub Actions run.

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  // In order to persist attestations to the repo, this should be a token with
  // repository write permissions.
  const ghToken = core.getInput('gh-token');

  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: ghToken
  });

  console.log(attestation);
}

run();
```

The `attestProvenance` function supports the following options:

```typescript
export type AttestProvenanceOptions = {
  // The name of the subject to be attested.
  subjectName: string
  // The digest of the subject to be attested. Should be a map of digest
  // algorithms to their hex-encoded values.
  subjectDigest: Record<string, string>
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: 'public-good' | 'github'
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
  // Issuer URL responsible for minting the OIDC token from which the
  // provenance data is read. Defaults to
  // 'https://token.actions.githubusercontent.com'.
  issuer?: string
}
```

### `Attestation`

The `Attestation` returned by `attest`/`attestProvenance` has the following
fields:

```typescript
export type Attestation = {
  /*
   * JSON-serialized Sigstore bundle containing the provenance attestation,
   * signature, signing certificate and witnessed timestamp.
   */
  bundle: SerializedBundle
  /*
   * PEM-encoded signing certificate used to sign the attestation.
   */
  certificate: string
  /*
   * ID of Rekor transparency log entry created for the attestation (if
   * applicable).
   */
  tlogID?: string
  /*
   * ID of the persisted attestation (accessible via the GH API).
   */
  attestationID?: string
}
```

For details about the Sigstore bundle format, see the [Bundle protobuf
specification](https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto).

## Sigstore Instance

When generating the signed attestation there are two different Sigstore
instances which can be used to issue the signing certificate. By default,
workflows initiated from public repositories will use the Sigstore public-good
instance and persist the attestation signature to the public [Rekor transparency
log](https://docs.sigstore.dev/logging/overview/). Workflows initiated from
private/internal repositories will use the GitHub-internal Sigstore instance
which uses a signed timestamp issued by GitHub's timestamp authority in place of
the public transparency log.

The default Sigstore instance selection can be overridden by passing an explicit
value of either "public-good" or "github" for the `sigstore` option when calling
either `attest` or `attestProvenance`, as shown below.
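For example, to force the public-good instance even when the repository is
private (a minimal sketch; the option names are those documented above, and the
token handling mirrors the earlier examples):

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: core.getInput('gh-token'),
    // Force the Sigstore public-good instance regardless of repo visibility
    sigstore: 'public-good'
  });

  // Rekor transparency log entry ID (populated for the public-good instance)
  console.log(attestation.tlogID);
}

run();
```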
## Storage

Attestations created by `attest`/`attestProvenance` will be uploaded to the GH
attestations API and associated with the appropriate repository. Attestation
storage is only supported for public repositories or repositories which belong
to a GitHub Enterprise Cloud account.

In order to generate attestations for private, non-Enterprise repositories, the
`skipWrite` option should be set to `true`, as in the sketch below.
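A minimal sketch for a private, non-Enterprise repository (option names as
documented above; the signed bundle remains available for the caller to store
elsewhere):

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: core.getInput('gh-token'),
    // Sign the attestation but skip the write to the attestations API
    skipWrite: true
  });

  // The signed Sigstore bundle can be persisted by the caller
  console.log(attestation.bundle);
}

run();
```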
@ -0,0 +1,20 @@
# @actions/attest Releases

### 1.2.1

- Retry request on attestation persistence failure

### 1.2.0

- Generate attestations using the v0.3 Sigstore bundle format.
- Bump @sigstore/bundle from 2.2.0 to 2.3.0.
- Bump @sigstore/sign from 2.2.3 to 2.3.0.
- Remove dependency on make-fetch-happen

### 1.1.0

- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement.

### 1.0.0

- Initial release
@ -0,0 +1,19 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`buildIntotoStatement returns an intoto statement 1`] = `
{
  "_type": "https://in-toto.io/Statement/v1",
  "predicate": {
    "key": "value",
  },
  "predicateType": "predicatey",
  "subject": [
    {
      "digest": {
        "sha256": "7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32",
      },
      "name": "subjecty",
    },
  ],
}
`;
@ -0,0 +1,42 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
{
  "params": {
    "buildDefinition": {
      "buildType": "https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1",
      "externalParameters": {
        "workflow": {
          "path": ".github/workflows/main.yml",
          "ref": "main",
          "repository": "https://github.com/owner/repo",
        },
      },
      "internalParameters": {
        "github": {
          "event_name": "push",
          "repository_id": "repo-id",
          "repository_owner_id": "owner-id",
        },
      },
      "resolvedDependencies": [
        {
          "digest": {
            "gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
          },
          "uri": "git+https://github.com/owner/repo@refs/heads/main",
        },
      ],
    },
    "runDetails": {
      "builder": {
        "id": "https://github.com/actions/runner/github-hosted",
      },
      "metadata": {
        "invocationId": "https://github.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
      },
    },
  },
  "type": "https://slsa.dev/provenance/v1",
}
`;
@ -0,0 +1,41 @@
import {signingEndpoints} from '../src/endpoints'

describe('signingEndpoints', () => {
  const originalEnv = process.env

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when using github.com', () => {
    beforeEach(async () => {
      process.env = {
        ...originalEnv,
        GITHUB_SERVER_URL: 'https://github.com'
      }
    })

    it('returns expected endpoints', async () => {
      const endpoints = signingEndpoints('github')

      expect(endpoints.fulcioURL).toEqual('https://fulcio.githubapp.com')
      expect(endpoints.tsaServerURL).toEqual('https://timestamp.githubapp.com')
    })
  })

  describe('when using custom domain', () => {
    beforeEach(async () => {
      process.env = {
        ...originalEnv,
        GITHUB_SERVER_URL: 'https://foo.bar.com'
      }
    })

    it('returns expected endpoints', async () => {
      const endpoints = signingEndpoints('github')

      expect(endpoints.fulcioURL).toEqual('https://fulcio.foo.bar.com')
      expect(endpoints.tsaServerURL).toEqual('https://timestamp.foo.bar.com')
    })
  })
})
@ -0,0 +1,6 @@
import {attest, attestProvenance} from '../src'

it('exports functions', () => {
  expect(attestProvenance).toBeInstanceOf(Function)
  expect(attest).toBeInstanceOf(Function)
})
@ -0,0 +1,23 @@
import {buildIntotoStatement} from '../src/intoto'
import type {Predicate, Subject} from '../src/shared.types'

describe('buildIntotoStatement', () => {
  const subject: Subject = {
    name: 'subjecty',
    digest: {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    }
  }

  const predicate: Predicate = {
    type: 'predicatey',
    params: {
      key: 'value'
    }
  }

  it('returns an intoto statement', () => {
    const statement = buildIntotoStatement(subject, predicate)
    expect(statement).toMatchSnapshot()
  })
})
@ -0,0 +1,147 @@
import * as jose from 'jose'
import nock from 'nock'
import {getIDTokenClaims} from '../src/oidc'

describe('getIDTokenClaims', () => {
  const originalEnv = process.env
  const issuer = 'https://example.com'
  const audience = 'nobody'
  const requestToken = 'token'
  const openidConfigPath = '/.well-known/openid-configuration'
  const jwksPath = '/.well-known/jwks.json'
  const tokenPath = '/token'
  const openIDConfig = {jwks_uri: `${issuer}${jwksPath}`}

  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  let key: any

  beforeEach(async () => {
    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: requestToken
    }

    // Generate JWT signing key
    key = await jose.generateKeyPair('PS256')

    // Create JWK and JWKS
    const jwk = await jose.exportJWK(key.publicKey)
    const jwks = {keys: [jwk]}

    nock(issuer).get(openidConfigPath).reply(200, openIDConfig)
    nock(issuer).get(jwksPath).reply(200, jwks)
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when ID token is valid', () => {
    const claims = {
      iss: issuer,
      aud: audience,
      ref: 'ref',
      sha: 'sha',
      repository: 'repo',
      event_name: 'push',
      workflow_ref: 'main',
      repository_id: '1',
      repository_owner_id: '1',
      runner_environment: 'github-hosted',
      run_id: '1',
      run_attempt: '1'
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('returns the ID token claims', async () => {
      const result = await getIDTokenClaims(issuer)
      expect(result).toEqual(claims)
    })
  })

  describe('when ID token is missing required claims', () => {
    const claims = {
      iss: issuer,
      aud: audience
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing claims/i)
    })
  })

  describe('when the ID token has the wrong issuer', () => {
    const claims = {foo: 'bar', iss: 'foo', aud: 'nobody'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/issuer invalid/)
    })
  })

  describe('when the ID token has the wrong audience', () => {
    const claims = {foo: 'bar', iss: issuer, aud: 'bar'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/audience invalid/)
    })
  })

  describe('when openid config cannot be retrieved', () => {
    const claims = {foo: 'bar', iss: issuer, aud: 'nobody'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})

      // Disable the openid config endpoint
      nock.removeInterceptor({
        proto: 'https',
        hostname: 'example.com',
        port: '443',
        method: 'GET',
        path: openidConfigPath
      })
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(
        /failed to get id/i
      )
    })
  })
})
@ -0,0 +1,252 @@
import * as github from '@actions/github'
import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import * as jose from 'jose'
import nock from 'nock'
import {MockAgent, setGlobalDispatcher} from 'undici'
import {SIGSTORE_PUBLIC_GOOD, signingEndpoints} from '../src/endpoints'
import {attestProvenance, buildSLSAProvenancePredicate} from '../src/provenance'

describe('provenance functions', () => {
  const originalEnv = process.env
  const issuer = 'https://example.com'
  const audience = 'nobody'
  const jwksPath = '/.well-known/jwks.json'
  const tokenPath = '/token'

  // MockAgent for mocking @actions/github
  const mockAgent = new MockAgent()
  setGlobalDispatcher(mockAgent)

  const claims = {
    iss: issuer,
    aud: 'nobody',
    repository: 'owner/repo',
    ref: 'refs/heads/main',
    sha: 'babca52ab0c93ae16539e5923cb0d7403b9a093b',
    workflow_ref: 'owner/repo/.github/workflows/main.yml@main',
    event_name: 'push',
    repository_id: 'repo-id',
    repository_owner_id: 'owner-id',
    run_id: 'run-id',
    run_attempt: 'run-attempt',
    runner_environment: 'github-hosted'
  }

  beforeEach(async () => {
    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
      GITHUB_SERVER_URL: 'https://github.com',
      GITHUB_REPOSITORY: claims.repository
    }

    // Generate JWT signing key
    const key = await jose.generateKeyPair('PS256')

    // Create JWK, JWKS, and JWT
    const jwk = await jose.exportJWK(key.publicKey)
    const jwks = {keys: [jwk]}
    const jwt = await new jose.SignJWT(claims)
      .setProtectedHeader({alg: 'PS256'})
      .sign(key.privateKey)

    // Mock OpenID configuration and JWKS endpoints
    nock(issuer)
      .get('/.well-known/openid-configuration')
      .reply(200, {jwks_uri: `${issuer}${jwksPath}`})
    nock(issuer).get(jwksPath).reply(200, jwks)

    // Mock OIDC token endpoint for populating the provenance
    nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('buildSLSAProvenancePredicate', () => {
    it('returns a provenance hydrated from an OIDC token', async () => {
      const predicate = await buildSLSAProvenancePredicate(issuer)
      expect(predicate).toMatchSnapshot()
    })
  })

  describe('attestProvenance', () => {
    // Subject to attest
    const subjectName = 'subjective'
    const subjectDigest = {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    }

    // Fake an OIDC token
    const oidcPayload = {sub: 'foo@bar.com', iss: ''}
    const oidcToken = `.${Buffer.from(JSON.stringify(oidcPayload)).toString(
      'base64'
    )}.}`

    const attestationID = '1234567890'

    beforeEach(async () => {
      nock(issuer)
        .get(tokenPath)
        .query({audience: 'sigstore'})
        .reply(200, {value: oidcToken})
    })

    describe('when using the github Sigstore instance', () => {
      const {fulcioURL, tsaServerURL} = signingEndpoints('github')

      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockTSA({baseURL: tsaServerURL})

        mockAgent
          .get('https://api.github.com')
          .intercept({
            path: /^\/repos\/.*\/.*\/attestations$/,
            method: 'post'
          })
          .reply(201, {id: attestationID})
      })

      describe('when the sigstore instance is explicitly set', () => {
        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token',
            sigstore: 'github',
            issuer
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeUndefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })

      describe('when the sigstore instance is inferred from the repo visibility', () => {
        const savedRepository = github.context.payload.repository

        beforeEach(() => {
          /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
          github.context.payload.repository = {visibility: 'private'} as any
        })

        afterEach(() => {
          github.context.payload.repository = savedRepository
        })

        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token',
            issuer
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeUndefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })
    })

    describe('when using the public-good Sigstore instance', () => {
      const {fulcioURL, rekorURL} = SIGSTORE_PUBLIC_GOOD

      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockRekor({baseURL: rekorURL})

        // Mock GH attestations API
        mockAgent
          .get('https://api.github.com')
          .intercept({
            path: /^\/repos\/.*\/.*\/attestations$/,
            method: 'post'
          })
          .reply(201, {id: attestationID})
      })

      describe('when the sigstore instance is explicitly set', () => {
        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token',
            sigstore: 'public-good',
            issuer
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeDefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })

      describe('when the sigstore instance is inferred from the repo visibility', () => {
        const savedRepository = github.context.payload.repository

        beforeEach(() => {
          /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
          github.context.payload.repository = {visibility: 'public'} as any
        })

        afterEach(() => {
          github.context.payload.repository = savedRepository
        })

        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token',
            issuer
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeDefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })
    })

    describe('when skipWrite is set to true', () => {
      const {fulcioURL, rekorURL} = SIGSTORE_PUBLIC_GOOD
      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockRekor({baseURL: rekorURL})
      })

      it('attests provenance', async () => {
        const attestation = await attestProvenance({
          subjectName,
          subjectDigest,
          token: 'token',
          sigstore: 'public-good',
          skipWrite: true,
          issuer
        })

        expect(attestation).toBeDefined()
        expect(attestation.bundle).toBeDefined()
        expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
        expect(attestation.tlogID).toBeDefined()
        expect(attestation.attestationID).toBeUndefined()
      })
    })
  })
})
@ -0,0 +1,101 @@
import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import nock from 'nock'
import {Payload, signPayload} from '../src/sign'

describe('signProvenance', () => {
  const originalEnv = process.env

  // Fake an OIDC token
  const subject = 'foo@bar.com'
  const oidcPayload = {sub: subject, iss: ''}
  const oidcToken = `.${Buffer.from(JSON.stringify(oidcPayload)).toString(
    'base64'
  )}.}`

  // Dummy provenance to be signed
  const provenance = {
    _type: 'https://in-toto.io/Statement/v1',
    subject: {
      name: 'subjective',
      digest: {
        sha256:
          '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
      }
    }
  }

  const payload: Payload = {
    body: Buffer.from(JSON.stringify(provenance)),
    type: 'application/vnd.in-toto+json'
  }

  const fulcioURL = 'https://fulcio.url'
  const rekorURL = 'https://rekor.url'
  const tsaServerURL = 'https://tsa.url'

  beforeEach(() => {
    // Mock OIDC token endpoint
    const tokenURL = 'https://token.url'

    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: tokenURL,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token'
    }

    nock(tokenURL)
      .get('/')
      .query({audience: 'sigstore'})
      .reply(200, {value: oidcToken})
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when visibility is public', () => {
    beforeEach(async () => {
      await mockFulcio({baseURL: fulcioURL, strict: false})
      await mockRekor({baseURL: rekorURL})
    })

    it('returns a bundle', async () => {
      const att = await signPayload(payload, {fulcioURL, rekorURL})

      expect(att).toBeDefined()
      expect(att.mediaType).toEqual(
        'application/vnd.dev.sigstore.bundle.v0.3+json'
      )

      expect(att.content.$case).toEqual('dsseEnvelope')
      expect(att.verificationMaterial.content.$case).toEqual('certificate')
      expect(att.verificationMaterial.tlogEntries).toHaveLength(1)
      expect(
        att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
      ).toHaveLength(0)
    })
  })

  describe('when visibility is private', () => {
    beforeEach(async () => {
      await mockFulcio({baseURL: fulcioURL, strict: false})
      await mockTSA({baseURL: tsaServerURL})
    })

    it('returns a bundle', async () => {
      const att = await signPayload(payload, {fulcioURL, tsaServerURL})

      expect(att).toBeDefined()
      expect(att.mediaType).toEqual(
        'application/vnd.dev.sigstore.bundle.v0.3+json'
      )

      expect(att.content.$case).toEqual('dsseEnvelope')
      expect(att.verificationMaterial.content.$case).toEqual('certificate')
      expect(att.verificationMaterial.tlogEntries).toHaveLength(0)
      expect(
        att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
      ).toHaveLength(1)
    })
  })
})
@ -0,0 +1,90 @@
import {MockAgent, setGlobalDispatcher} from 'undici'
import {writeAttestation} from '../src/store'

describe('writeAttestation', () => {
  const originalEnv = process.env
  const attestation = {foo: 'bar '}
  const token = 'token'

  const mockAgent = new MockAgent()
  setGlobalDispatcher(mockAgent)

  beforeEach(() => {
    process.env = {
      ...originalEnv,
      GITHUB_REPOSITORY: 'foo/bar'
    }
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when the api call is successful', () => {
    beforeEach(() => {
      mockAgent
        .get('https://api.github.com')
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(201, {id: '123'})
    })

    it('persists the attestation', async () => {
      await expect(writeAttestation(attestation, token)).resolves.toEqual('123')
    })
  })

  describe('when the api call fails', () => {
    beforeEach(() => {
      mockAgent
        .get('https://api.github.com')
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(500, 'oops')
    })

    it('throws an error', async () => {
      await expect(
        writeAttestation(attestation, token, {retry: 0})
      ).rejects.toThrow(/oops/)
    })
  })

  describe('when the api call fails but succeeds on retry', () => {
    beforeEach(() => {
      const pool = mockAgent.get('https://api.github.com')

      pool
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(500, 'oops')
        .times(1)

      pool
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(201, {id: '123'})
        .times(1)
    })

    it('persists the attestation', async () => {
      await expect(writeAttestation(attestation, token)).resolves.toEqual('123')
    })
  })
})
@ -0,0 +1,60 @@
{
  "name": "@actions/attest",
  "version": "1.2.1",
  "description": "Actions attestation lib",
  "keywords": [
    "github",
    "actions",
    "attestation"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/main/packages/attest",
  "license": "MIT",
  "main": "lib/index.js",
  "types": "lib/index.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib"
  ],
  "publishConfig": {
    "access": "public",
    "provenance": true
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/attest"
  },
  "scripts": {
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "devDependencies": {
    "@sigstore/mock": "^0.6.5",
    "@sigstore/rekor-types": "^2.0.0",
    "@types/jsonwebtoken": "^9.0.6",
    "jose": "^5.2.3",
    "nock": "^13.5.1",
    "undici": "^5.28.4"
  },
  "dependencies": {
    "@actions/core": "^1.10.1",
    "@actions/github": "^6.0.0",
    "@actions/http-client": "^2.2.1",
    "@octokit/plugin-retry": "^6.0.1",
    "@sigstore/bundle": "^2.3.0",
    "@sigstore/sign": "^2.3.0",
    "jsonwebtoken": "^9.0.2",
    "jwks-rsa": "^3.1.0"
  },
  "overrides": {
    "@octokit/plugin-retry": {
      "@octokit/core": "^5.2.0"
    }
  }
}
@ -0,0 +1,97 @@
import {bundleToJSON} from '@sigstore/bundle'
import {X509Certificate} from 'crypto'
import {SigstoreInstance, signingEndpoints} from './endpoints'
import {buildIntotoStatement} from './intoto'
import {Payload, signPayload} from './sign'
import {writeAttestation} from './store'

import type {Bundle} from '@sigstore/sign'
import type {Attestation, Predicate, Subject} from './shared.types'

const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'

/**
 * Options for attesting a subject / predicate.
 */
export type AttestOptions = {
  // The name of the subject to be attested.
  subjectName: string
  // The digest of the subject to be attested. Should be a map of digest
  // algorithms to their hex-encoded values.
  subjectDigest: Record<string, string>
  // Content type of the predicate being attested.
  predicateType: string
  // Predicate to be attested.
  predicate: object
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: SigstoreInstance
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
}

/**
 * Generates an attestation for the given subject and predicate. The subject and
 * predicate are combined into an in-toto statement, which is then signed using
 * the identified Sigstore instance and stored as an attestation.
 * @param options - The options for attestation.
 * @returns A promise that resolves to the attestation.
 */
export async function attest(options: AttestOptions): Promise<Attestation> {
  const subject: Subject = {
    name: options.subjectName,
    digest: options.subjectDigest
  }
  const predicate: Predicate = {
    type: options.predicateType,
    params: options.predicate
  }
  const statement = buildIntotoStatement(subject, predicate)

  // Sign the provenance statement
  const payload: Payload = {
    body: Buffer.from(JSON.stringify(statement)),
    type: INTOTO_PAYLOAD_TYPE
  }
  const endpoints = signingEndpoints(options.sigstore)
  const bundle = await signPayload(payload, endpoints)

  // Store the attestation
  let attestationID: string | undefined
  if (options.skipWrite !== true) {
    attestationID = await writeAttestation(bundleToJSON(bundle), options.token)
  }

  return toAttestation(bundle, attestationID)
}

function toAttestation(bundle: Bundle, attestationID?: string): Attestation {
  let certBytes: Buffer
  switch (bundle.verificationMaterial.content.$case) {
    case 'x509CertificateChain':
      certBytes =
        bundle.verificationMaterial.content.x509CertificateChain.certificates[0]
          .rawBytes
      break
    case 'certificate':
      certBytes = bundle.verificationMaterial.content.certificate.rawBytes
      break
    default:
      throw new Error('Bundle must contain an x509 certificate')
  }

  const signingCert = new X509Certificate(certBytes)

  // Collect transparency log ID if available
  const tlogEntries = bundle.verificationMaterial.tlogEntries
  const tlogID = tlogEntries.length > 0 ? tlogEntries[0].logIndex : undefined

  return {
    bundle: bundleToJSON(bundle),
    certificate: signingCert.toString(),
    tlogID,
    attestationID
  }
}
@ -0,0 +1,55 @@
import * as github from '@actions/github'

const PUBLIC_GOOD_ID = 'public-good'
const GITHUB_ID = 'github'

const FULCIO_PUBLIC_GOOD_URL = 'https://fulcio.sigstore.dev'
const REKOR_PUBLIC_GOOD_URL = 'https://rekor.sigstore.dev'

export type SigstoreInstance = typeof PUBLIC_GOOD_ID | typeof GITHUB_ID

export type Endpoints = {
  fulcioURL: string
  rekorURL?: string
  tsaServerURL?: string
}

export const SIGSTORE_PUBLIC_GOOD: Endpoints = {
  fulcioURL: FULCIO_PUBLIC_GOOD_URL,
  rekorURL: REKOR_PUBLIC_GOOD_URL
}

export const signingEndpoints = (sigstore?: SigstoreInstance): Endpoints => {
  let instance: SigstoreInstance

  // An explicitly set instance type takes precedence, but if not set, use the
  // repository's visibility to determine the instance type.
  if (sigstore && [PUBLIC_GOOD_ID, GITHUB_ID].includes(sigstore)) {
    instance = sigstore
  } else {
    instance =
      github.context.payload.repository?.visibility === 'public'
        ? PUBLIC_GOOD_ID
        : GITHUB_ID
  }

  switch (instance) {
    case PUBLIC_GOOD_ID:
      return SIGSTORE_PUBLIC_GOOD
    case GITHUB_ID:
      return buildGitHubEndpoints()
  }
}

function buildGitHubEndpoints(): Endpoints {
  const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
  let host = new URL(serverURL).hostname

  if (host === 'github.com') {
    host = 'githubapp.com'
  }
  return {
    fulcioURL: `https://fulcio.${host}`,
    tsaServerURL: `https://timestamp.${host}`
  }
}
@ -0,0 +1,9 @@
export {AttestOptions, attest} from './attest'
export {
  AttestProvenanceOptions,
  attestProvenance,
  buildSLSAProvenancePredicate
} from './provenance'

export type {SerializedBundle} from '@sigstore/bundle'
export type {Attestation, Predicate, Subject} from './shared.types'
@ -0,0 +1,32 @@
import {Predicate, Subject} from './shared.types'

const INTOTO_STATEMENT_V1_TYPE = 'https://in-toto.io/Statement/v1'

/**
 * An in-toto statement.
 * https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md
 */
export type InTotoStatement = {
  _type: string
  subject: Subject[]
  predicateType: string
  predicate: object
}

/**
 * Assembles the given subject and predicate into an in-toto statement.
 * @param subject - The subject of the statement.
 * @param predicate - The predicate of the statement.
 * @returns The constructed in-toto statement.
 */
export const buildIntotoStatement = (
  subject: Subject,
  predicate: Predicate
): InTotoStatement => {
  return {
    _type: INTOTO_STATEMENT_V1_TYPE,
    subject: [subject],
    predicateType: predicate.type,
    predicate: predicate.params
  }
}
@ -0,0 +1,102 @@
import {getIDToken} from '@actions/core'
import {HttpClient} from '@actions/http-client'
import * as jwt from 'jsonwebtoken'
import jwks from 'jwks-rsa'

const OIDC_AUDIENCE = 'nobody'

const REQUIRED_CLAIMS = [
  'iss',
  'ref',
  'sha',
  'repository',
  'event_name',
  'workflow_ref',
  'repository_id',
  'repository_owner_id',
  'runner_environment',
  'run_id',
  'run_attempt'
] as const

export type ClaimSet = {[K in (typeof REQUIRED_CLAIMS)[number]]: string}

type OIDCConfig = {
  jwks_uri: string
}

export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
  try {
    const token = await getIDToken(OIDC_AUDIENCE)
    const claims = await decodeOIDCToken(token, issuer)
    assertClaimSet(claims)
    return claims
  } catch (error) {
    throw new Error(`Failed to get ID token: ${error.message}`)
  }
}

const decodeOIDCToken = async (
  token: string,
  issuer: string
): Promise<jwt.JwtPayload> => {
  // Verify and decode token
  return new Promise((resolve, reject) => {
    jwt.verify(
      token,
      getPublicKey(issuer),
      {audience: OIDC_AUDIENCE, issuer},
      (err, decoded) => {
        if (err) {
          reject(err)
        } else if (!decoded || typeof decoded === 'string') {
          reject(new Error('No decoded token'))
        } else {
          resolve(decoded)
        }
      }
    )
  })
}

// Returns a callback to locate the public key for the given JWT header. This
// involves two calls:
//  1. Fetch the OpenID configuration to get the JWKS URI.
//  2. Fetch the public key from the JWKS URI.
const getPublicKey =
  (issuer: string): jwt.GetPublicKeyOrSecret =>
  (header: jwt.JwtHeader, callback: jwt.SigningKeyCallback) => {
    // Look up the JWKS URI from the issuer's OpenID configuration
    new HttpClient('actions/attest')
      .getJson<OIDCConfig>(`${issuer}/.well-known/openid-configuration`)
      .then(data => {
        if (!data.result) {
          callback(new Error('No OpenID configuration found'))
        } else {
          // Fetch the public key from the JWKS URI
          jwks({jwksUri: data.result.jwks_uri}).getSigningKey(
            header.kid,
            (err, key) => {
              callback(err, key?.getPublicKey())
            }
          )
        }
      })
      .catch(err => {
        callback(err)
      })
  }

function assertClaimSet(claims: jwt.JwtPayload): asserts claims is ClaimSet {
  const missingClaims: string[] = []

  for (const claim of REQUIRED_CLAIMS) {
    if (!(claim in claims)) {
      missingClaims.push(claim)
    }
  }

  if (missingClaims.length > 0) {
    throw new Error(`Missing claims: ${missingClaims.join(', ')}`)
  }
}
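As a rough sketch of how this module would be consumed (assuming a workflow that grants the `id-token: write` permission, which `getIDToken` needs; the module path is assumed):

```typescript
// Sketch only: requires `permissions: id-token: write` in the workflow.
import {getIDTokenClaims} from './oidc'

async function example(): Promise<void> {
  const claims = await getIDTokenClaims(
    'https://token.actions.githubusercontent.com'
  )
  // After assertClaimSet, every entry in REQUIRED_CLAIMS is present.
  console.log(claims.repository, claims.ref, claims.run_id)
}
```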
@ -0,0 +1,99 @@
import {attest, AttestOptions} from './attest'
import {getIDTokenClaims} from './oidc'
import type {Attestation, Predicate} from './shared.types'

const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1'

const GITHUB_BUILDER_ID_PREFIX = 'https://github.com/actions/runner'
const GITHUB_BUILD_TYPE =
  'https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1'

const DEFAULT_ISSUER = 'https://token.actions.githubusercontent.com'

export type AttestProvenanceOptions = Omit<
  AttestOptions,
  'predicate' | 'predicateType'
> & {
  issuer?: string
}

/**
 * Builds an SLSA (Supply Chain Levels for Software Artifacts) provenance
 * predicate using the GitHub Actions Workflow build type.
 * https://slsa.dev/spec/v1.0/provenance
 * https://github.com/slsa-framework/github-actions-buildtypes/tree/main/workflow/v1
 * @param issuer - URL for the OIDC issuer. Defaults to the GitHub Actions token
 * issuer.
 * @returns The SLSA provenance predicate.
 */
export const buildSLSAProvenancePredicate = async (
  issuer: string = DEFAULT_ISSUER
): Promise<Predicate> => {
  const serverURL = process.env.GITHUB_SERVER_URL
  const claims = await getIDTokenClaims(issuer)

  // Split just the path and ref from the workflow string.
  // owner/repo/.github/workflows/main.yml@main =>
  //   .github/workflows/main.yml, main
  const [workflowPath, workflowRef] = claims.workflow_ref
    .replace(`${claims.repository}/`, '')
    .split('@')

  return {
    type: SLSA_PREDICATE_V1_TYPE,
    params: {
      buildDefinition: {
        buildType: GITHUB_BUILD_TYPE,
        externalParameters: {
          workflow: {
            ref: workflowRef,
            repository: `${serverURL}/${claims.repository}`,
            path: workflowPath
          }
        },
        internalParameters: {
          github: {
            event_name: claims.event_name,
            repository_id: claims.repository_id,
            repository_owner_id: claims.repository_owner_id
          }
        },
        resolvedDependencies: [
          {
            uri: `git+${serverURL}/${claims.repository}@${claims.ref}`,
            digest: {
              gitCommit: claims.sha
            }
          }
        ]
      },
      runDetails: {
        builder: {
          id: `${GITHUB_BUILDER_ID_PREFIX}/${claims.runner_environment}`
        },
        metadata: {
          invocationId: `${serverURL}/${claims.repository}/actions/runs/${claims.run_id}/attempts/${claims.run_attempt}`
        }
      }
    }
  }
}

/**
 * Attests the build provenance of the provided subject. Generates the SLSA
 * build provenance predicate, assembles it into an in-toto statement, and
 * attests it.
 *
 * @param options - The options for attesting the provenance.
 * @returns A promise that resolves to the attestation.
 */
export async function attestProvenance(
  options: AttestProvenanceOptions
): Promise<Attestation> {
  const predicate = await buildSLSAProvenancePredicate(options.issuer)
  return attest({
    ...options,
    predicateType: predicate.type,
    predicate: predicate.params
  })
}
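A sketch of what `buildSLSAProvenancePredicate` produces, under the assumption that the token's `workflow_ref` claim is `owner/repo/.github/workflows/main.yml@refs/heads/main` and the server is github.com (module path assumed):

```typescript
// Sketch only: the workflow_ref value and resulting fields are assumed.
import {buildSLSAProvenancePredicate} from './provenance'

const predicate = await buildSLSAProvenancePredicate()
// predicate.type === 'https://slsa.dev/provenance/v1'
// predicate.params.buildDefinition.externalParameters.workflow =>
//   {ref: 'refs/heads/main',
//    repository: 'https://github.com/owner/repo',
//    path: '.github/workflows/main.yml'}
```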
@ -0,0 +1,52 @@
import type {SerializedBundle} from '@sigstore/bundle'

/*
 * The subject of an attestation.
 */
export type Subject = {
  /*
   * Name of the subject.
   */
  name: string
  /*
   * Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
   */
  digest: Record<string, string>
}

/*
 * The predicate of an attestation.
 */
export type Predicate = {
  /*
   * URI identifying the content type of the predicate.
   */
  type: string
  /*
   * Predicate parameters.
   */
  params: object
}

/*
 * Artifact attestation.
 */
export type Attestation = {
  /*
   * Serialized Sigstore bundle containing the provenance attestation,
   * signature, signing certificate and witnessed timestamp.
   */
  bundle: SerializedBundle
  /*
   * PEM-encoded signing certificate used to sign the attestation.
   */
  certificate: string
  /*
   * ID of Rekor transparency log entry created for the attestation.
   */
  tlogID?: string
  /*
   * ID of the persisted attestation (accessible via the GH API).
   */
  attestationID?: string
}
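For concreteness (all values below are made up; the module path is assumed), literals matching these types look like:

```typescript
import type {Subject, Predicate} from './shared.types'

// Placeholder values: digest keys are algorithm names, values hex-encoded.
const subject: Subject = {
  name: 'dist/app.tar.gz',
  digest: {
    sha256: '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
  }
}

const predicate: Predicate = {
  type: 'https://slsa.dev/provenance/v1',
  params: {} // predicate-specific parameters go here
}
```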
@ -0,0 +1,109 @@
import {
  Bundle,
  BundleBuilder,
  CIContextProvider,
  DSSEBundleBuilder,
  FulcioSigner,
  RekorWitness,
  TSAWitness,
  Witness
} from '@sigstore/sign'

const OIDC_AUDIENCE = 'sigstore'
const DEFAULT_TIMEOUT = 10000
const DEFAULT_RETRIES = 3

/**
 * The payload to be signed (body) and its media type (type).
 */
export type Payload = {
  body: Buffer
  type: string
}

/**
 * Options for signing a document.
 */
export type SignOptions = {
  /**
   * The URL of the Fulcio service.
   */
  fulcioURL: string
  /**
   * The URL of the Rekor service.
   */
  rekorURL?: string
  /**
   * The URL of the TSA (Time Stamping Authority) server.
   */
  tsaServerURL?: string
  /**
   * The timeout duration in milliseconds when communicating with Sigstore
   * services.
   */
  timeout?: number
  /**
   * The number of retry attempts.
   */
  retry?: number
}

/**
 * Signs the provided payload with a Sigstore-issued certificate and returns the
 * signature bundle.
 * @param payload Payload to be signed.
 * @param options Signing options.
 * @returns A promise that resolves to the Sigstore signature bundle.
 */
export const signPayload = async (
  payload: Payload,
  options: SignOptions
): Promise<Bundle> => {
  const artifact = {
    data: payload.body,
    type: payload.type
  }

  // Sign the artifact and build the bundle
  return initBundleBuilder(options).create(artifact)
}

// Assembles the Sigstore bundle builder with the appropriate options
const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
  const identityProvider = new CIContextProvider(OIDC_AUDIENCE)
  const timeout = opts.timeout || DEFAULT_TIMEOUT
  const retry = opts.retry || DEFAULT_RETRIES
  const witnesses: Witness[] = []

  const signer = new FulcioSigner({
    identityProvider,
    fulcioBaseURL: opts.fulcioURL,
    timeout,
    retry
  })

  if (opts.rekorURL) {
    witnesses.push(
      new RekorWitness({
        rekorBaseURL: opts.rekorURL,
        entryType: 'dsse',
        timeout,
        retry
      })
    )
  }

  if (opts.tsaServerURL) {
    witnesses.push(
      new TSAWitness({
        tsaBaseURL: opts.tsaServerURL,
        timeout,
        retry
      })
    )
  }

  // Build the bundle with the singleCertificate option which will
  // trigger the creation of v0.3 DSSE bundles
  return new DSSEBundleBuilder({signer, witnesses, singleCertificate: true})
}
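A minimal sketch of signing a payload against the public-good endpoints (module paths are assumed, and the in-toto media type is an assumption here, not taken from the diff):

```typescript
// Sketch only: signs a JSON payload using the public-good Sigstore instance.
import {signPayload} from './sign'
import {SIGSTORE_PUBLIC_GOOD} from './endpoints'

const payload = {
  body: Buffer.from(
    JSON.stringify({_type: 'https://in-toto.io/Statement/v1'})
  ),
  type: 'application/vnd.in-toto+json' // assumed media type
}

// Returns a Sigstore bundle containing the DSSE envelope, the signing
// certificate, and any Rekor/TSA witness data.
const bundle = await signPayload(payload, SIGSTORE_PUBLIC_GOOD)
```

Note that `Endpoints` is structurally assignable to `SignOptions`, so the constant from endpoints.ts can be passed straight through.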
@ -0,0 +1,41 @@
import * as github from '@actions/github'
import {retry} from '@octokit/plugin-retry'

const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
const DEFAULT_RETRY_COUNT = 5

export type WriteOptions = {
  retry?: number
}
/**
 * Writes an attestation to the repository's attestations endpoint.
 * @param attestation - The attestation to write.
 * @param token - The GitHub token for authentication.
 * @returns The ID of the attestation.
 * @throws Error if the attestation fails to persist.
 */
export const writeAttestation = async (
  attestation: unknown,
  token: string,
  options: WriteOptions = {}
): Promise<string> => {
  const retries = options.retry ?? DEFAULT_RETRY_COUNT
  const octokit = github.getOctokit(token, {retry: {retries}}, retry)

  try {
    const response = await octokit.request(CREATE_ATTESTATION_REQUEST, {
      owner: github.context.repo.owner,
      repo: github.context.repo.repo,
      data: {bundle: attestation}
    })

    const data =
      typeof response.data == 'string'
        ? JSON.parse(response.data)
        : response.data
    return data?.id
  } catch (err) {
    const message = err instanceof Error ? err.message : err
    throw new Error(`Failed to persist attestation: ${message}`)
  }
}
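And a sketch of persisting the signed bundle (module path assumed; the `attestations: write` token permission is an assumption about the endpoint, not stated in the diff):

```typescript
// Sketch only: persists a serialized Sigstore bundle via the REST API.
import {writeAttestation} from './store'

declare const serializedBundle: object // e.g. the bundleToJSON(bundle) output
declare const token: string // token assumed to have attestations: write

const attestationID = await writeAttestation(serializedBundle, token, {
  retry: 3 // override the default of 5 request retries
})
```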
@ -0,0 +1,12 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "baseUrl": "./",
    "outDir": "./lib",
    "declaration": true,
    "rootDir": "./src"
  },
  "include": [
    "./src"
  ]
}
@ -1,16 +1,154 @@
# @actions/cache Releases

### 0.1.0
### 3.2.4

- Initial release
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts

### 0.2.0
### 3.2.3

- Fixes issues with the zstd compression algorithm on Windows and Ubuntu 16.04 [#469](https://github.com/actions/toolkit/pull/469)
- Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)

### 0.2.1
### 3.2.2

- Fix to await async function getCompressionMethod
- Add new default cache download method to improve performance and reduce hangs [#1484](https://github.com/actions/toolkit/pull/1484)

### 3.2.1

- Updated @azure/storage-blob to `v12.13.0`

### 3.2.0

- Add `lookupOnly` to cache restore `DownloadOptions`.

### 3.1.4

- Fix zstd not being used due to `zstd --version` output change in zstd 1.5.4 release. See [#1353](https://github.com/actions/toolkit/pull/1353).

### 3.1.3

- Fix to prevent setting the MSYS environment variable globally [#1329](https://github.com/actions/toolkit/pull/1329).

### 3.1.2

- Fix issue with symlink restoration on windows.

### 3.1.1

- Reverted changes in 3.1.0 to fix issue with symlink restoration on windows.
- Added support for verbose logging about cache version during cache miss.

### 3.1.0

- Update actions/cache on windows to use gnu tar and zstd by default
- Update actions/cache on windows to fallback to bsdtar and zstd if gnu tar is not available.
- Added support for fallback to gzip to restore old caches on windows.

### 3.1.0-beta.3

- Bug fixes for fallback to gzip to restore old caches on windows and bsdtar if gnutar is not available.

### 3.1.0-beta.2

- Added support for fallback to gzip to restore old caches on windows.

### 3.0.6

- Added `@azure/abort-controller` to dependencies to fix compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208)

### 3.0.5

- Update `@actions/cache` to use `@actions/core@^1.10.0`

### 3.0.4

- Fix zstd not working for windows on gnu tar in issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
- Allow users to provide a custom timeout for aborting the download of a cache segment via the environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes.

### 3.0.3

- Bug fixes for download stuck issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.2

- Added 1 hour timeout for the download stuck issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.1

- Fix [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory.
- Fix [#809](https://github.com/actions/cache/issues/809) `zstd -d: no such file or directory` error on AWS self-hosted runners.

### 3.0.0

- Updated actions/cache to suppress Actions cache server errors and log warnings for those errors [#1122](https://github.com/actions/toolkit/pull/1122)

### 2.0.6

- Fix `Tar failed with error: The process '/usr/bin/tar' failed with exit code 1` issue when the temp directory where the tar is created is a subdirectory of the path the user asked to cache. ([issue](https://github.com/actions/cache/issues/689))

### 2.0.5

- Fix to avoid saving empty cache when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))

### 2.0.4

- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

### 2.0.3

- Update to v2.0.0 of `@actions/http-client`

### 2.0.0

- Added support to check if the Actions cache service feature is available or not [#1028](https://github.com/actions/toolkit/pull/1028)

### 1.0.11

- Fix file downloads > 2GB ([issue](https://github.com/actions/cache/issues/773))

### 1.0.10

- Update `lockfileVersion` to `v2` in `package-lock.json` [#1022](https://github.com/actions/toolkit/pull/1022)

### 1.0.9

- Use @azure/ms-rest-js v2.6.0
- Use @azure/storage-blob v12.8.0

### 1.0.8

- Increase the allowed artifact cache size from 5GB to 10GB ([issue](https://github.com/actions/cache/discussions/497))

### 1.0.7

- Fixes permissions issue extracting archives with GNU tar on macOS ([issue](https://github.com/actions/cache/issues/527))

### 1.0.6

- Make caching more verbose [#650](https://github.com/actions/toolkit/pull/650)
- Use GNU tar on macOS if available [#701](https://github.com/actions/toolkit/pull/701)

### 1.0.5

- Fix to ensure Windows cache paths get resolved correctly

### 1.0.4

- Use @actions/core v1.2.6
- Fixes uploadChunk to throw an error if any unsuccessful response code is received

### 1.0.3

- Use http-client v1.0.9
- Fixes error handling so retries are not attempted on non-retryable errors (409 Conflict, for example)
- Adds 5 second delay between retry attempts

### 1.0.2

- Use posix archive format to add support for some tools

### 1.0.1

- Fix bug in downloading large files (> 2 GBs) with the Azure SDK

### 1.0.0
@ -19,148 +157,14 @@
- Includes changes that break compatibility with earlier versions, including:
  - `retry`, `retryTypedResponse`, and `retryHttpClientResponse` moved from `cacheHttpClient` to `requestUtils`

### 1.0.1
### 0.2.1

- Fix bug in downloading large files (> 2 GBs) with the Azure SDK
- Fix to await async function getCompressionMethod

### 1.0.2
### 0.2.0

- Use posix archive format to add support for some tools
- Fixes issues with the zstd compression algorithm on Windows and Ubuntu 16.04 [#469](https://github.com/actions/toolkit/pull/469)

### 1.0.3
### 0.1.0

- Use http-client v1.0.9
- Fixes error handling so retries are not attempted on non-retryable errors (409 Conflict, for example)
- Adds 5 second delay between retry attempts

### 1.0.4

- Use @actions/core v1.2.6
- Fixes uploadChunk to throw an error if any unsuccessful response code is received

### 1.0.5

- Fix to ensure Windows cache paths get resolved correctly

### 1.0.6

- Make caching more verbose [#650](https://github.com/actions/toolkit/pull/650)
- Use GNU tar on macOS if available [#701](https://github.com/actions/toolkit/pull/701)

### 1.0.7

- Fixes permissions issue extracting archives with GNU tar on macOS ([issue](https://github.com/actions/cache/issues/527))

### 1.0.8

- Increase the allowed artifact cache size from 5GB to 10GB ([issue](https://github.com/actions/cache/discussions/497))

### 1.0.9

- Use @azure/ms-rest-js v2.6.0
- Use @azure/storage-blob v12.8.0

### 1.0.10

- Update `lockfileVersion` to `v2` in `package-lock.json` [#1022](https://github.com/actions/toolkit/pull/1022)

### 1.0.11

- Fix file downloads > 2GB ([issue](https://github.com/actions/cache/issues/773))

### 2.0.0

- Added support to check if the Actions cache service feature is available or not [#1028](https://github.com/actions/toolkit/pull/1028)

### 2.0.3

- Update to v2.0.0 of `@actions/http-client`

### 2.0.4

- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

### 2.0.5

- Fix to avoid saving empty cache when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))

### 2.0.6

- Fix `Tar failed with error: The process '/usr/bin/tar' failed with exit code 1` issue when the temp directory where the tar is created is a subdirectory of the path the user asked to cache. ([issue](https://github.com/actions/cache/issues/689))

### 3.0.0

- Updated actions/cache to suppress Actions cache server errors and log warnings for those errors [#1122](https://github.com/actions/toolkit/pull/1122)

### 3.0.1

- Fix [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory.
- Fix [#809](https://github.com/actions/cache/issues/809) `zstd -d: no such file or directory` error on AWS self-hosted runners.

### 3.0.2

- Added 1 hour timeout for the download stuck issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.3

- Bug fixes for download stuck issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.4

- Fix zstd not working for windows on gnu tar in issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
- Allow users to provide a custom timeout for aborting the download of a cache segment via the environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes.

### 3.0.5

- Update `@actions/cache` to use `@actions/core@^1.10.0`

### 3.0.6

- Added `@azure/abort-controller` to dependencies to fix compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208)

### 3.1.0-beta.1

- Update actions/cache on windows to use gnu tar and zstd by default and fall back to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984))

### 3.1.0-beta.2

- Added support for fallback to gzip to restore old caches on windows.

### 3.1.0-beta.3

- Bug fixes for fallback to gzip to restore old caches on windows and bsdtar if gnutar is not available.

### 3.1.0

- Update actions/cache on windows to use gnu tar and zstd by default
- Update actions/cache on windows to fallback to bsdtar and zstd if gnu tar is not available.
- Added support for fallback to gzip to restore old caches on windows.

### 3.1.1

- Reverted changes in 3.1.0 to fix issue with symlink restoration on windows.
- Added support for verbose logging about cache version during cache miss.

### 3.1.2

- Fix issue with symlink restoration on windows.

### 3.1.3

- Fix to prevent setting the MSYS environment variable globally [#1329](https://github.com/actions/toolkit/pull/1329).

### 3.1.4

- Fix zstd not being used due to `zstd --version` output change in zstd 1.5.4 release. See [#1353](https://github.com/actions/toolkit/pull/1353).

### 3.2.0

- Add `lookupOnly` to cache restore `DownloadOptions`.

### 3.2.1

- Updated @azure/storage-blob to `v12.13.0`

### 3.2.2

- Add new default cache download method to improve performance and reduce hangs [#1484](https://github.com/actions/toolkit/pull/1484)
- Initial release
@ -5,6 +5,12 @@ import {DownloadOptions, getDownloadOptions} from '../src/options'

jest.mock('../src/internal/downloadUtils')

test('getCacheVersion does not mutate arguments', async () => {
  const paths = ['node_modules']
  getCacheVersion(paths, undefined, true)
  expect(paths).toEqual(['node_modules'])
})

test('getCacheVersion with one path returns version', async () => {
  const paths = ['node_modules']
  const result = getCacheVersion(paths, undefined, true)
@ -2,6 +2,10 @@ import {promises as fs} from 'fs'
import * as path from 'path'
import * as cacheUtils from '../src/internal/cacheUtils'

beforeEach(() => {
  jest.resetModules()
})

test('getArchiveFileSizeInBytes returns file size', () => {
  const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')
@ -38,3 +42,23 @@ test('resolvePaths works on github workspace directory', async () => {
  const paths = await cacheUtils.resolvePaths([workspace])
  expect(paths.length).toBeGreaterThan(0)
})

test('isGhes returns false for github.com', async () => {
  process.env.GITHUB_SERVER_URL = 'https://github.com'
  expect(cacheUtils.isGhes()).toBe(false)
})

test('isGhes returns false for ghe.com', async () => {
  process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
  expect(cacheUtils.isGhes()).toBe(false)
})

test('isGhes returns true for enterprise URL', async () => {
  process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
  expect(cacheUtils.isGhes()).toBe(true)
})

test('isGhes returns false for ghe.localhost', () => {
  process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
  expect(cacheUtils.isGhes()).toBe(false)
})
@ -1,12 +1,12 @@
{
  "name": "@actions/cache",
  "version": "3.2.2",
  "version": "3.2.4",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "@actions/cache",
      "version": "3.2.2",
      "version": "3.2.4",
      "license": "MIT",
      "dependencies": {
        "@actions/core": "^1.10.0",
@ -1,6 +1,6 @@
{
  "name": "@actions/cache",
  "version": "3.2.2",
  "version": "3.2.4",
  "preview": true,
  "description": "Actions cache lib",
  "keywords": [
@ -80,7 +80,8 @@ export function getCacheVersion(
  compressionMethod?: CompressionMethod,
  enableCrossOsArchive = false
): string {
  const components = paths
  // don't pass changes upstream
  const components = paths.slice()

  // Add compression method to cache version to restore
  // compressed cache as per compression method
@ -135,5 +135,11 @@ export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )
  return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost =
    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')

  return !isGitHubHost && !isGheHost
}
@ -333,3 +333,154 @@ toPlatformPath('/foo/bar') // => \foo\bar
// On a Linux runner.
toPlatformPath('\\foo\\bar') // => /foo/bar
```

#### Platform helper

Provides shorthands for getting information about the platform the action is running on.

```js
import { platform } from '@actions/core'

/* equals to a call of os.platform() */
platform.platform // 'win32' | 'darwin' | 'linux' | 'freebsd' | 'openbsd' | 'android' | 'cygwin' | 'sunos'

/* equals to a call of os.arch() */
platform.arch // 'x64' | 'arm' | 'arm64' | 'ia32' | 'mips' | 'mipsel' | 'ppc' | 'ppc64' | 'riscv64' | 's390' | 's390x'

/* common shorthands for platform-specific logic */
platform.isWindows // true
platform.isMacOS // false
platform.isLinux // false

/* run platform-specific script to get more details about the exact platform, works on Windows, macOS and Linux */
const {
  name, // Microsoft Windows 11 Enterprise
  version // 10.0.22621
} = await platform.getDetails()
```

#### Populating job summary

These methods can be used to populate a [job summary](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary). A job summary is a buffer that can be added to throughout your job via `core.summary` methods.

When complete, a job summary must be written to the summary buffer file via the `core.summary.write()` method.

All methods except `addRaw()` utilize the `addRaw()` method to append to the buffer, followed by an EOL using the `addEOL()` method.

```typescript
// Write raw text, optionally add an EOL after the content, defaults to false
core.summary.addRaw('Some content here :speech_balloon:', true)
// Output: Some content here :speech_balloon:\n

// Add an operating system-specific end-of-line marker
core.summary.addEOL()
// Output (POSIX): \n
// Output (Windows): \r\n

// Add a codeblock with an optional language for syntax highlighting
core.summary.addCodeBlock('console.log(\'hello world\')', 'javascript')
// Output: <pre lang="javascript"><code>console.log('hello world')</code></pre>

// Add a list, second parameter indicates if list is ordered, defaults to false
core.summary.addList(['item1', 'item2', 'item3'], true)
// Output: <ol><li>item1</li><li>item2</li><li>item3</li></ol>

// Add a collapsible HTML details element
core.summary.addDetails('Label', 'Some detail that will be collapsed')
// Output: <details><summary>Label</summary>Some detail that will be collapsed</details>

// Add an image, image options parameter is optional, you can supply one of or both width and height in pixels
core.summary.addImage('example.png', 'alt description of img', {width: '100', height: '100'})
// Output: <img src="example.png" alt="alt description of img" width="100" height="100">

// Add an HTML section heading element, optionally pass a level that translates to 'hX' ie. h2. Defaults to h1
core.summary.addHeading('My Heading', '2')
// Output: <h2>My Heading</h2>

// Add an HTML thematic break <hr>
core.summary.addSeparator()
// Output: <hr>

// Add an HTML line break <br>
core.summary.addBreak()
// Output: <br>

// Add an HTML blockquote with an optional citation
core.summary.addQuote('To be or not to be', 'Shakespeare')
// Output: <blockquote cite="Shakespeare">To be or not to be</blockquote>

// Add an HTML anchor tag
core.summary.addLink('click here', 'https://github.com')
// Output: <a href="https://github.com">click here</a>
```

Tables are added using the `addTable()` method, which takes an array of `SummaryTableRow`.

```typescript
export type SummaryTableRow = (SummaryTableCell | string)[]

export interface SummaryTableCell {
  /**
   * Cell content
   */
  data: string
  /**
   * Render cell as header
   * (optional) default: false
   */
  header?: boolean
  /**
   * Number of columns the cell extends
   * (optional) default: '1'
   */
  colspan?: string
  /**
   * Number of rows the cell extends
   * (optional) default: '1'
   */
  rowspan?: string
}
```

For example:

```typescript
// Each inner array is one table row; header cells are flagged with `header: true`
const tableData = [
  [
    {data: 'Header1', header: true},
    {data: 'Header2', header: true},
    {data: 'Header3', header: true}
  ],
  [{data: 'MyData1'}, {data: 'MyData2'}, {data: 'MyData3'}]
]

// Add an HTML table
core.summary.addTable(tableData)
// Output: <table><tr><th>Header1</th><th>Header2</th><th>Header3</th></tr><tr><td>MyData1</td><td>MyData2</td><td>MyData3</td></tr></table>
```

In addition to job summary content, there are utility functions for interfacing with the buffer.

```typescript
// Empties the summary buffer AND wipes the summary file on disk
core.summary.clear()

// Returns the current summary buffer as a string
core.summary.stringify()

// If the summary buffer is empty
core.summary.isEmptyBuffer()

// Resets the summary buffer without writing to the summary file on disk
core.summary.emptyBuffer()

// Writes text in the buffer to the summary buffer file and empties the buffer, optionally overwriting all existing content in the summary file with buffer contents. Defaults to false.
core.summary.write({overwrite: true})
```