mirror of https://github.com/actions/toolkit
Merge branch 'actions:main' into main
commit a5b91a9c3c
@@ -43,7 +43,7 @@ Note that before a PR will be accepted, you must ensure:

 1. In a new branch, create a new Lerna package:

    ```console
-   $ npm run create-package new-package
+   $ npm run new-package [name]
    ```

    This will ask you some questions about the new package. Start with `0.0.0` as the first version (look generally at some of the other packages for how the package.json is structured).
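The generated manifest itself is not shown in this diff; as a rough orientation only, a freshly created package's `package.json` might start out like the hedged sketch below (every field value here is an assumption modeled on the existing packages, not the generator's literal output):

```json
{
  "name": "@actions/new-package",
  "version": "0.0.0",
  "description": "Actions new-package lib",
  "main": "lib/new-package.js",
  "types": "lib/new-package.d.ts",
  "scripts": {
    "tsc": "tsc"
  },
  "license": "MIT"
}
```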
@@ -18,10 +18,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -22,10 +22,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -20,7 +20,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
@@ -1,11 +1,13 @@

 name: Publish NPM

+run-name: Publish NPM - ${{ github.event.inputs.package }}
+
 on:
   workflow_dispatch:
     inputs:
       package:
         required: true
-        description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache'
+        description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache, attest'

 jobs:
   test:
@@ -13,13 +15,13 @@ jobs:

     steps:
       - name: setup repo
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: verify package exists
         run: ls packages/${{ github.event.inputs.package }}

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -40,7 +42,7 @@ jobs:

         working-directory: packages/${{ github.event.inputs.package }}

       - name: upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}
           path: packages/${{ github.event.inputs.package }}/*.tgz
@@ -49,10 +51,13 @@ jobs:

     runs-on: macos-latest
     needs: test
     environment: npm-publish
+    permissions:
+      contents: read
+      id-token: write
     steps:

       - name: download artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}
@@ -62,7 +67,7 @@ jobs:

           NPM_TOKEN: ${{ secrets.TOKEN }}

       - name: publish
-        run: npm publish *.tgz
+        run: npm publish --provenance *.tgz

       - name: notify slack on failure
         if: failure()
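npm's `--provenance` flag publishes a signed provenance statement alongside the tarball, and it depends on the Actions OIDC token — which is why the same commit adds `id-token: write` to the publish job above. A hedged, self-contained sketch of the pairing (job, secret, and registry names here are illustrative, not this workflow's actual values):

```yaml
# Hypothetical minimal publish job: npm --provenance needs the Actions OIDC
# token, which is only issued when the job has the id-token: write permission.
publish:
  runs-on: ubuntu-latest
  permissions:
    contents: read
    id-token: write   # required for npm --provenance
  steps:
    - uses: actions/checkout@v4
    - uses: actions/setup-node@v4
      with:
        node-version: 20.x
        registry-url: 'https://registry.npmjs.org'
    - run: npm publish --provenance --access public
      env:
        NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```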
@@ -23,10 +23,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20.x
@@ -9,7 +9,7 @@ jobs:

     if: ${{ github.repository_owner == 'actions' }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Update Octokit
         working-directory: packages/github
         run: |
@@ -2,3 +2,4 @@

 /packages/artifact/ @actions/artifacts-actions
 /packages/cache/ @actions/actions-cache
+/packages/attest/ @actions/package-security
@@ -102,6 +102,15 @@ $ npm install @actions/cache

 ```
 <br/>

+:lock_with_ink_pen: [@actions/attest](packages/attest)
+
+Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)
+
+```bash
+$ npm install @actions/attest
+```
+<br/>
+
 ## Creating an Action with the Toolkit

 :question: [Choosing an action type](docs/action-types.md)
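As a quick illustration of what the new package exposes, a sketch of attesting build provenance for a file with `@actions/attest` follows. The `attestProvenance` call and its option names follow the package's README, but treat the exact signature and the `attestationID` field as assumptions:

```typescript
// Hedged sketch: attest build provenance for a built artifact. Assumes the
// code runs inside a workflow job with `id-token: write` and
// `attestations: write` permissions; option names follow the package docs.
import {attestProvenance} from '@actions/attest'
import * as crypto from 'crypto'
import * as fs from 'fs'

async function run(): Promise<void> {
  // Hash the artifact so the attestation can name its exact contents.
  const file = fs.readFileSync('dist/my-artifact.tgz')
  const digest = crypto.createHash('sha256').update(file).digest('hex')

  const attestation = await attestProvenance({
    subjectName: 'my-artifact.tgz',
    subjectDigest: {sha256: digest},
    token: process.env.GITHUB_TOKEN as string
  })

  console.log(`attestation ID: ${attestation.attestationID}`)
}

run()
```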
@@ -32,7 +32,7 @@ jobs:

     os: [ubuntu-16.04, windows-2019]
   runs-on: ${{matrix.os}}
   actions:
-  - uses: actions/setup-node@v3
+  - uses: actions/setup-node@v4
     with:
       version: ${{matrix.node}}
   - run: |
@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:

 ```yaml
 steps:
-    using: actions/setup-node@v3
+    using: actions/setup-node@v4
 ```

 # Define Metadata
(File diff suppressed because it is too large.)
@@ -13,7 +13,7 @@

     "lint": "eslint packages/**/*.ts",
     "lint-fix": "eslint packages/**/*.ts --fix",
     "new-package": "scripts/create-package",
-    "test": "jest --testTimeout 60000"
+    "test": "jest --testTimeout 70000"
   },
   "devDependencies": {
     "@types/jest": "^29.5.4",
@@ -27,7 +27,7 @@

     "eslint-plugin-prettier": "^5.0.0",
     "flow-bin": "^0.115.0",
     "jest": "^29.6.4",
-    "lerna": "^7.1.4",
+    "lerna": "^6.4.1",
     "nx": "16.6.0",
     "prettier": "^3.0.0",
     "ts-jest": "^29.1.1",
@@ -1,15 +1,132 @@

 # @actions/artifact Releases

-### 0.1.0
+### 2.1.9

-- Initial release
+- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
+- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)

-### 0.2.0
+### 2.1.8

-- Fixes to TCP connections not closing
-- GZip file compression to speed up downloads
-- Improved logging and output
-- Extra documentation
+- Allows `*.localhost` domains for hostname checks for local development.
+
+### 2.1.7
+
+- Update unzip-stream dependency and reverted to using `unzip.Extract()`
+
+### 2.1.6
+
+- Will retry on invalid request responses.
+
+### 2.1.5
+
+- Bumped `archiver` dependency to 7.0.1
+
+### 2.1.4
+
+- Adds info-level logging for zip extraction
+
+### 2.1.3
+
+- Fixes a bug in the extract logic updated in 2.1.2
+
+### 2.1.2
+
+- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts
+
+### 2.1.1
+
+- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
+
+### 2.1.0
+
+- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
+- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)
+
+### 2.0.1
+
+- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
+
+### 2.0.0
+
+- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
+- Numerous API changes, [some breaking](./README.md#breaking-changes).
+
+- [Blog post with more info](https://github.blog/2024-02-12-get-started-with-v4-of-github-actions-artifacts/)
+
+### 1.1.1
+
+- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
+
+### 1.1.0
+
+- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
+
+### 1.0.2
+
+- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
+
+### 1.0.1
+
+- Update to v2.0.0 of `@actions/http-client`
+
+### 1.0.0
+
+- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
+
+### 0.6.1
+
+- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
+
+### 0.6.0
+
+- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
+- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
+- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
+- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
+- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
+- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
+
+### 0.5.2
+
+- Add HTTP 500 as a retryable status code for artifact upload and download.
+
+### 0.5.1
+
+- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
+
+### 0.5.0
+
+- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
+
+### 0.4.2
+
+- Improved retry-ability when a partial artifact download is encountered
+
+### 0.4.1
+
+- Update to latest @actions/core version
+
+### 0.4.0
+
+- Add option to specify custom retentions on artifacts
+-
+### 0.3.5
+
+- Retry in the event of a 413 response
+
+### 0.3.3
+
+- Increase chunk size during upload from 4MB to 8MB
+- Improve user-agent strings during API calls to help internally diagnose issues
+
+### 0.3.2
+
+- Fix to ensure readstreams get correctly reset in the event of a retry
+
+### 0.3.1
+
+- Fix to ensure temporary gzip files get correctly deleted during artifact upload
+- Remove spaces as a forbidden character during upload
+
+### 0.3.0
@@ -20,93 +137,13 @@

 - Clearer error message if storage quota has been reached
 - Improved logging and output during artifact download

-### 0.3.1
+### 0.2.0

-- Fix to ensure temporary gzip files get correctly deleted during artifact upload
-- Remove spaces as a forbidden character during upload
+- Fixes to TCP connections not closing
+- GZip file compression to speed up downloads
+- Improved logging and output
+- Extra documentation

-### 0.3.2
+### 0.1.0

-- Fix to ensure readstreams get correctly reset in the event of a retry
-
-### 0.3.3
-
-- Increase chunk size during upload from 4MB to 8MB
-- Improve user-agent strings during API calls to help internally diagnose issues
-
-### 0.3.5
-
-- Retry in the event of a 413 response
-
-### 0.4.0
-
-- Add option to specify custom retentions on artifacts
-
-### 0.4.1
-
-- Update to latest @actions/core version
-
-### 0.4.2
-
-- Improved retry-ability when a partial artifact download is encountered
-
-### 0.5.0
-
-- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
-
-### 0.5.1
-
-- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
-
-### 0.5.2
-
-- Add HTTP 500 as a retryable status code for artifact upload and download.
-
-### 0.6.0
-
-- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
-- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
-- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
-- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
-- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
-- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
-
-### 0.6.1
-
-- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
-
-### 1.0.0
-
-- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
-
-### 1.0.1
-
-- Update to v2.0.0 of `@actions/http-client`
-
-### 1.0.2
-
-- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
-
-### 1.1.0
-
-- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
-
-### 1.1.1
-
-- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
-
-### 2.0.0
-
-- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
-- Numerous API changes, [some breaking](./README.md#breaking-changes).
-
-- Blog post with more info: TBD
-
-### 2.0.1
-
-- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
-
-### 2.1.0
-
-- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
-- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)
+- Initial release
@@ -116,6 +116,54 @@ describe('artifact-http-client', () => {

     expect(mockPost).toHaveBeenCalledTimes(2)
   })

+  it('should retry if invalid body response', async () => {
+    const mockPost = jest
+      .fn(() => {
+        const msgSucceeded = new http.IncomingMessage(new net.Socket())
+        msgSucceeded.statusCode = 200
+        return {
+          message: msgSucceeded,
+          readBody: async () => {
+            return Promise.resolve(
+              `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
+            )
+          }
+        }
+      })
+      .mockImplementationOnce(() => {
+        const msgFailed = new http.IncomingMessage(new net.Socket())
+        msgFailed.statusCode = 502
+        msgFailed.statusMessage = 'Bad Gateway'
+        return {
+          message: msgFailed,
+          readBody: async () => {
+            return Promise.resolve('💥')
+          }
+        }
+      })
+    const mockHttpClient = (
+      HttpClient as unknown as jest.Mock
+    ).mockImplementation(() => {
+      return {
+        post: mockPost
+      }
+    })
+
+    const client = internalArtifactTwirpClient(clientOptions)
+    const artifact = await client.CreateArtifact({
+      workflowRunBackendId: '1234',
+      workflowJobRunBackendId: '5678',
+      name: 'artifact',
+      version: 4
+    })
+
+    expect(mockHttpClient).toHaveBeenCalledTimes(1)
+    expect(artifact).toBeDefined()
+    expect(artifact.ok).toBe(true)
+    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
+    expect(mockPost).toHaveBeenCalledTimes(2)
+  })
+
   it('should fail if the request fails 5 times', async () => {
     const mockPost = jest.fn(() => {
       const msgFailed = new http.IncomingMessage(new net.Socket())
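Generically, the retry-on-unparseable-body behavior this test pins down looks like the hedged sketch below — an illustrative helper, not the client's actual retry code:

```typescript
// Hedged sketch of the idea under test: if a response body fails to parse as
// JSON (e.g. a gateway error page or '💥'), treat it like a transient failure
// and retry with exponential backoff instead of surfacing the parse error.
async function postWithRetry(
  post: () => Promise<{statusCode?: number; body: string}>,
  maxAttempts = 5
): Promise<unknown> {
  let lastError: Error = new Error('no attempts made')
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const response = await post()
    try {
      return JSON.parse(response.body) // invalid bodies throw here
    } catch (error) {
      lastError = error as Error
      await new Promise(r => setTimeout(r, 2 ** attempt * 100)) // backoff
    }
  }
  throw lastError
}
```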
@@ -0,0 +1,32 @@

+import * as config from '../src/internal/shared/config'
+
+beforeEach(() => {
+  jest.resetModules()
+})
+
+describe('isGhes', () => {
+  it('should return false when the request domain is github.com', () => {
+    process.env.GITHUB_SERVER_URL = 'https://github.com'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with ghe.com', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.com'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with ghe.localhost', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with .localhost', () => {
+    process.env.GITHUB_SERVER_URL = 'https://github.localhost'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return true when the request domain is specific to an enterprise', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
+    expect(config.isGhes()).toBe(true)
+  })
+})
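For reference, an `isGhes` implementation consistent with these tests might look like the following sketch. The real function lives in `src/internal/shared/config`; this reconstruction is an assumption, not the shipped code:

```typescript
// Sketch of an isGhes() satisfying the tests above: anything that is not
// github.com, *.ghe.com, or a *.localhost development host counts as GHES.
export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )
  const hostname = ghUrl.hostname.toUpperCase()

  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')

  return !isGitHubHost && !isGheHost && !isLocalHost
}
```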
@@ -121,6 +121,16 @@ const mockGetArtifactFailure = jest.fn(() => {

   }
 })

+const mockGetArtifactMalicious = jest.fn(() => {
+  const message = new http.IncomingMessage(new net.Socket())
+  message.statusCode = 200
+  message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains files that are formatted x/../../etc/hosts
+  message.push(null)
+  return {
+    message
+  }
+})
+
 describe('download-artifact', () => {
   describe('public', () => {
     beforeEach(setup)
@@ -170,6 +180,59 @@ describe('download-artifact', () => {

     expect(response.downloadPath).toBe(fixtures.workspaceDir)
   })

+  it('should not allow path traversal from malicious artifacts', async () => {
+    const downloadArtifactMock = github.getOctokit(fixtures.token).rest
+      .actions.downloadArtifact as MockedDownloadArtifact
+    downloadArtifactMock.mockResolvedValueOnce({
+      headers: {
+        location: fixtures.blobStorageUrl
+      },
+      status: 302,
+      url: '',
+      data: Buffer.from('')
+    })
+
+    const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
+      () => {
+        return {
+          get: mockGetArtifactMalicious
+        }
+      }
+    )
+
+    const response = await downloadArtifactPublic(
+      fixtures.artifactID,
+      fixtures.repositoryOwner,
+      fixtures.repositoryName,
+      fixtures.token
+    )
+
+    expect(downloadArtifactMock).toHaveBeenCalledWith({
+      owner: fixtures.repositoryOwner,
+      repo: fixtures.repositoryName,
+      artifact_id: fixtures.artifactID,
+      archive_format: 'zip',
+      request: {
+        redirect: 'manual'
+      }
+    })
+
+    expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
+    expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
+      fixtures.blobStorageUrl
+    )
+
+    // ensure path traversal was not possible
+    expect(
+      fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
+    ).toBe(true)
+    expect(
+      fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
+    ).toBe(true)
+
+    expect(response.downloadPath).toBe(fixtures.workspaceDir)
+  })
+
   it('should successfully download an artifact to user defined path', async () => {
     const customPath = path.join(testDir, 'custom')
Binary file not shown.
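The `evil.zip` fixture (the binary added above) packs entries shaped like `x/../../etc/hosts`; the test asserts they land sanitized at `x/etc/hosts` under the workspace rather than escaping it. A generic guard against this class of bug — a hedged sketch, not the extractor the package actually ships — resolves each entry path and requires it to stay under the destination root:

```typescript
import * as path from 'path'

// Hedged sketch: reject any zip entry whose resolved path would escape the
// extraction root, e.g. entries shaped like "x/../../etc/hosts". (A sanitizing
// extractor would instead strip the ".." segments, as the test expects.)
function resolveEntryPath(root: string, entryName: string): string {
  const resolved = path.resolve(root, entryName)
  if (!resolved.startsWith(path.resolve(root) + path.sep)) {
    throw new Error(`Blocked path traversal in zip entry: ${entryName}`)
  }
  return resolved
}

// resolveEntryPath('/tmp/workspace', 'x/../../etc/hosts') -> throws
// resolveEntryPath('/tmp/workspace', 'x/etc/hosts')       -> '/tmp/workspace/x/etc/hosts'
```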
@@ -1,257 +1,137 @@

 import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
 import * as zip from '../src/internal/upload/zip'
 import * as util from '../src/internal/shared/util'
 import * as retention from '../src/internal/upload/retention'
 import * as config from '../src/internal/shared/config'
-import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
+import {ArtifactServiceClientJSON} from '../src/generated'
 import * as blobUpload from '../src/internal/upload/blob-upload'
 import {uploadArtifact} from '../src/internal/upload/upload-artifact'
 import {noopLogs} from './common'
 import {FilesNotFoundError} from '../src/internal/shared/errors'
+import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
+import * as fs from 'fs'
+import * as path from 'path'
+
+const uploadStreamMock = jest.fn()
+const blockBlobClientMock = jest.fn().mockImplementation(() => ({
+  uploadStream: uploadStreamMock
+}))
+
+jest.mock('@azure/storage-blob', () => ({
+  BlobClient: jest.fn().mockImplementation(() => {
+    return {
+      getBlockBlobClient: blockBlobClientMock
+    }
+  })
+}))
+
+const fixtures = {
+  uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
+  files: [
+    ['file1.txt', 'test 1 file content'],
+    ['file2.txt', 'test 2 file content'],
+    ['file3.txt', 'test 3 file content']
+  ],
+  backendIDs: {
+    workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
+    workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
+  },
+  runtimeToken: 'test-token',
+  resultsServiceURL: 'http://results.local',
+  inputs: {
+    artifactName: 'test-artifact',
+    files: [
+      '/home/user/files/plz-upload/file1.txt',
+      '/home/user/files/plz-upload/file2.txt',
+      '/home/user/files/plz-upload/dir/file3.txt'
+    ],
+    rootDirectory: '/home/user/files/plz-upload'
+  }
+}
+
 describe('upload-artifact', () => {
+  beforeAll(() => {
+    if (!fs.existsSync(fixtures.uploadDirectory)) {
+      fs.mkdirSync(fixtures.uploadDirectory, {recursive: true})
+    }
+
+    for (const [file, content] of fixtures.files) {
+      fs.writeFileSync(path.join(fixtures.uploadDirectory, file), content)
+    }
+  })
+
   beforeEach(() => {
     noopLogs()
+    jest
+      .spyOn(uploadZipSpecification, 'validateRootDirectory')
+      .mockReturnValue()
+    jest
+      .spyOn(util, 'getBackendIdsFromToken')
+      .mockReturnValue(fixtures.backendIDs)
+    jest
+      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
+      .mockReturnValue(
+        fixtures.files.map(file => ({
+          sourcePath: path.join(fixtures.uploadDirectory, file[0]),
+          destinationPath: file[0]
+        }))
+      )
+    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
+    jest
+      .spyOn(config, 'getResultsServiceUrl')
+      .mockReturnValue(fixtures.resultsServiceURL)
   })

+  afterEach(() => {
+    jest.restoreAllMocks()
+  })
+
-  it('should successfully upload an artifact', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
-    jest
-      .spyOn(zip, 'createZipUploadStream')
-      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
-    jest
-      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
-      .mockReturnValue(
-        Promise.resolve({
-          ok: true,
-          signedUploadUrl: 'https://signed-upload-url.com'
-        })
-      )
-    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
-      Promise.resolve({
-        uploadSize: 1234,
-        sha256Hash: 'test-sha256-hash'
-      })
-    )
-    jest
-      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
-      .mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))
-
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
-    )
-
-    expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
-  })
-
   it('should throw an error if the root directory is invalid', () => {
     jest
       .spyOn(uploadZipSpecification, 'validateRootDirectory')
       .mockImplementation(() => {
         throw new Error('Invalid root directory')
       })

     const uploadResp = uploadArtifact(
       'test-artifact',
       [
         '/home/user/files/plz-upload/file1.txt',
         '/home/user/files/plz-upload/file2.txt',
         '/home/user/files/plz-upload/dir/file3.txt'
       ],
       '/home/user/files/plz-upload'
     )

     expect(uploadResp).rejects.toThrow('Invalid root directory')
   })

-  it('should reject if there are no files to upload', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
+  it('should reject if there are no files to upload', async () => {
     jest
       .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
+      .mockClear()
       .mockReturnValue([])

     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
-    expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
+    await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
   })

-  it('should reject if no backend IDs are found', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
-    jest
-      .spyOn(zip, 'createZipUploadStream')
-      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
+  it('should reject if no backend IDs are found', async () => {
+    jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()

     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )

-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })

-  it('should return false if the creation request fails', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
+  it('should return false if the creation request fails', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))

-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )

-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })

-  it('should return false if blob storage upload is unsuccessful', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
+  it('should return false if blob storage upload is unsuccessful', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
@@ -264,57 +144,19 @@ describe('upload-artifact', () => {

       .spyOn(blobUpload, 'uploadZipToBlobStorage')
       .mockReturnValue(Promise.reject(new Error('boom')))

-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )

-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })

-  it('should reject if finalize artifact fails', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-
+  it('should reject if finalize artifact fails', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
@@ -333,22 +175,113 @@ describe('upload-artifact', () => {

       .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
       .mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))

-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )

-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })

+  it('should successfully upload an artifact', async () => {
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          signedUploadUrl: 'https://signed-upload-url.local'
+        })
+      )
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          artifactId: '1'
+        })
+      )
+
+    uploadStreamMock.mockImplementation(
+      async (
+        stream: NodeJS.ReadableStream,
+        bufferSize?: number,
+        maxConcurrency?: number,
+        options?: BlockBlobUploadStreamOptions
+      ) => {
+        const {onProgress, abortSignal} = options || {}
+
+        onProgress?.({loadedBytes: 0})
+
+        return new Promise(resolve => {
+          const timerId = setTimeout(() => {
+            onProgress?.({loadedBytes: 256})
+            resolve({})
+          }, 1_000)
+          abortSignal?.addEventListener('abort', () => {
+            clearTimeout(timerId)
+            resolve({})
+          })
+        })
+      }
+    )
+
+    const {id, size} = await uploadArtifact(
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
+    )
+
+    expect(id).toBe(1)
+    expect(size).toBe(256)
+  })
+
+  it('should throw an error uploading blob chunks get delayed', async () => {
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          signedUploadUrl: 'https://signed-upload-url.local'
+        })
+      )
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          artifactId: '1'
+        })
+      )
+    jest
+      .spyOn(config, 'getResultsServiceUrl')
+      .mockReturnValue('https://results.local')
+
+    jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)
+
+    uploadStreamMock.mockImplementation(
+      async (
+        stream: NodeJS.ReadableStream,
+        bufferSize?: number,
+        maxConcurrency?: number,
+        options?: BlockBlobUploadStreamOptions
+      ) => {
+        const {onProgress, abortSignal} = options || {}
+        onProgress?.({loadedBytes: 0})
+        return new Promise(resolve => {
+          abortSignal?.addEventListener('abort', () => {
+            resolve({})
+          })
+        })
+      }
+    )
+
+    const uploadResp = uploadArtifact(
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
+    )
+
+    await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
+  })
 })
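The stall detection these tests exercise (release 2.1.9's chunk-timeout fix) can be pictured as a watchdog timer that is re-armed on every progress event and aborts the upload when no bytes move within the configured window. A hedged sketch of that pattern — illustrative, not the package's literal code:

```typescript
// Hedged sketch of progress-stall detection: re-arm a timer on every progress
// event; if no bytes move within `timeoutMs`, abort the upload and report.
function createStallWatchdog(
  abortController: AbortController,
  timeoutMs: number,
  onStall: (err: Error) => void
): {onProgress: () => void; stop: () => void} {
  let timer: NodeJS.Timeout | undefined

  const arm = (): void => {
    if (timer) clearTimeout(timer)
    timer = setTimeout(() => {
      abortController.abort() // cancel the in-flight blob upload
      onStall(new Error('Upload progress stalled.'))
    }, timeoutMs)
  }

  arm()
  return {
    onProgress: arm, // wire this to uploadStream's onProgress callback
    stop: () => {
      if (timer) clearTimeout(timer)
    }
  }
}
```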
@@ -3,6 +3,8 @@

 - [Frequently Asked Questions](#frequently-asked-questions)
   - [Supported Characters](#supported-characters)
   - [Compression? ZIP? How is my artifact stored?](#compression-zip-how-is-my-artifact-stored)
+  - [Which versions of the artifacts packages are compatible?](#which-versions-of-the-artifacts-packages-are-compatible)
+  - [How long will my artifact be available?](#how-long-will-my-artifact-be-available)

 ## Supported Characters
@@ -35,22 +37,26 @@ Higher levels will result in better compression, but will take longer to complete.

 For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
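As an illustration, that recommendation maps to the `compression-level` input of `actions/upload-artifact@v4` (a hedged example — check the action's README for the authoritative inputs):

```yaml
- uses: actions/upload-artifact@v4
  with:
    name: large-binary
    path: dist/model.bin
    compression-level: 0 # skip compression for hard-to-compress large files
```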
 ## Which versions of the artifacts packages are compatible?
-[actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact), are part of the [GitHub Actions toolkit](https://github.com/actions/toolkit) and are typically used together to upload and download artifacts in your workflows.
+[actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact), leverage [GitHub Actions toolkit](https://github.com/actions/toolkit) and are typically used together to upload and download artifacts in your workflows.

-1. **Matching Versions:**
-  - Use matching versions of `actions/upload-artifact` and `actions/download-artifact` to ensure compatibility.
+| upload-artifact | download-artifact | toolkit |
+|---|---|---|
+| v4 | v4 | v2 |
+| < v3 | < v3 | < v1 |

-2. **Workflow YAML File:**
-  - In your GitHub Actions workflow YAML file, you specify the version of the actions you want to use. For example:
-  ```yaml
-  uses: actions/upload-artifact@v4
-  # ...
-  uses: actions/download-artifact@v4
-  # ...
-  ```
+Use matching versions of `actions/upload-artifact` and `actions/download-artifact` to ensure compatibility.

-3. **Release Notes:**
-  - Check the release notes for each repository to see if there are any specific notes about compatibility or changes in behavior.
+In your GitHub Actions workflow YAML file, you specify the version of the actions you want to use. For example:
+
+```yaml
+uses: actions/upload-artifact@v4
+# ...
+uses: actions/download-artifact@v4
+# ...
+```
+
+**Release Notes:**
+Check the release notes for each repository to see if there are any specific notes about compatibility or changes in behavior.

 ## How long will my artifact be available?
-The default retention period is 90 days. For more information, visit: https://github.com/actions/upload-artifact?tab=readme-ov-file#retention-period
+The default retention period is **90 days**. For more information, visit: https://github.com/actions/upload-artifact?tab=readme-ov-file#retention-period
@@ -1,12 +1,12 @@

 {
   "name": "@actions/artifact",
-  "version": "2.0.1",
+  "version": "2.1.9",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/artifact",
-      "version": "2.0.1",
+      "version": "2.1.9",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
@@ -18,7 +18,7 @@

         "@octokit/plugin-retry": "^3.0.9",
         "@octokit/request-error": "^5.0.0",
         "@protobuf-ts/plugin": "^2.2.3-alpha.1",
-        "archiver": "^5.3.1",
+        "archiver": "^7.0.1",
         "crypto": "^1.0.1",
         "jwt-decode": "^3.1.2",
         "twirp-ts": "^2.5.0",
@@ -186,6 +186,22 @@

         "node": ">=14.0.0"
       }
     },
+    "node_modules/@isaacs/cliui": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
+      "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
+      "dependencies": {
+        "string-width": "^5.1.2",
+        "string-width-cjs": "npm:string-width@^4.2.0",
+        "strip-ansi": "^7.0.1",
+        "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
+        "wrap-ansi": "^8.1.0",
+        "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
+      },
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/@octokit/auth-token": {
       "version": "2.5.0",
       "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz",
@@ -348,6 +364,15 @@

         "node": ">=8.0.0"
       }
     },
+    "node_modules/@pkgjs/parseargs": {
+      "version": "0.11.0",
+      "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
+      "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
+      "optional": true,
+      "engines": {
+        "node": ">=14"
+      }
+    },
     "node_modules/@protobuf-ts/plugin": {
       "version": "2.9.1",
       "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.1.tgz",
@@ -480,74 +505,120 @@

         "@types/node": "*"
       }
     },
+    "node_modules/abort-controller": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
+      "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
+      "dependencies": {
+        "event-target-shim": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=6.5"
+      }
+    },
+    "node_modules/ansi-regex": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz",
+      "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/chalk/ansi-regex?sponsor=1"
+      }
+    },
     "node_modules/ansi-sequence-parser": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/ansi-sequence-parser/-/ansi-sequence-parser-1.1.1.tgz",
       "integrity": "sha512-vJXt3yiaUL4UU546s3rPXlsry/RnM730G1+HkpKE012AN0sx1eOrxSu95oKDIonskeLTijMgqWZ3uDEe3NFvyg==",
       "dev": true
     },
+    "node_modules/ansi-styles": {
+      "version": "6.2.1",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
+      "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+      }
+    },
     "node_modules/archiver": {
-      "version": "5.3.1",
-      "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.1.tgz",
-      "integrity": "sha512-8KyabkmbYrH+9ibcTScQ1xCJC/CGcugdVIwB+53f5sZziXgwUh3iXlAlANMxcZyDEfTHMe6+Z5FofV8nopXP7w==",
+      "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz",
+      "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==",
       "dependencies": {
-        "archiver-utils": "^2.1.0",
-        "async": "^3.2.3",
-        "buffer-crc32": "^0.2.1",
-        "readable-stream": "^3.6.0",
-        "readdir-glob": "^1.0.0",
-        "tar-stream": "^2.2.0",
-        "zip-stream": "^4.1.0"
+        "archiver-utils": "^5.0.2",
+        "async": "^3.2.4",
+        "buffer-crc32": "^1.0.0",
+        "readable-stream": "^4.0.0",
+        "readdir-glob": "^1.1.2",
+        "tar-stream": "^3.0.0",
+        "zip-stream": "^6.0.1"
       },
       "engines": {
-        "node": ">= 10"
+        "node": ">= 14"
       }
     },
     "node_modules/archiver-utils": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-2.1.0.tgz",
-      "integrity": "sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==",
+      "version": "5.0.2",
+      "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz",
+      "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==",
       "dependencies": {
-        "glob": "^7.1.4",
+        "glob": "^10.0.0",
         "graceful-fs": "^4.2.0",
+        "is-stream": "^2.0.1",
         "lazystream": "^1.0.0",
-        "lodash.defaults": "^4.2.0",
-        "lodash.difference": "^4.5.0",
-        "lodash.flatten": "^4.4.0",
-        "lodash.isplainobject": "^4.0.6",
-        "lodash.union": "^4.6.0",
+        "lodash": "^4.17.15",
         "normalize-path": "^3.0.0",
-        "readable-stream": "^2.0.0"
+        "readable-stream": "^4.0.0"
       },
       "engines": {
-        "node": ">= 6"
+        "node": ">= 14"
       }
     },
-    "node_modules/archiver-utils/node_modules/readable-stream": {
-      "version": "2.3.8",
-      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz",
-      "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==",
+    "node_modules/archiver-utils/node_modules/brace-expansion": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+      "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
       "dependencies": {
-        "core-util-is": "~1.0.0",
-        "inherits": "~2.0.3",
-        "isarray": "~1.0.0",
-        "process-nextick-args": "~2.0.0",
-        "safe-buffer": "~5.1.1",
-        "string_decoder": "~1.1.1",
-        "util-deprecate": "~1.0.1"
+        "balanced-match": "^1.0.0"
       }
     },
-    "node_modules/archiver-utils/node_modules/safe-buffer": {
-      "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
-      "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
-    },
-    "node_modules/archiver-utils/node_modules/string_decoder": {
-      "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
-      "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
+    "node_modules/archiver-utils/node_modules/glob": {
+      "version": "10.3.12",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.12.tgz",
+      "integrity": "sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==",
       "dependencies": {
-        "safe-buffer": "~5.1.0"
+        "foreground-child": "^3.1.0",
+        "jackspeak": "^2.3.6",
+        "minimatch": "^9.0.1",
+        "minipass": "^7.0.4",
+        "path-scurry": "^1.10.2"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/archiver-utils/node_modules/minimatch": {
+      "version": "9.0.4",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz",
+      "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/async": {
@@ -560,11 +631,22 @@

       "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
       "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
     },
+    "node_modules/b4a": {
+      "version": "1.6.6",
+      "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz",
+      "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg=="
+    },
     "node_modules/balanced-match": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
       "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
     },
+    "node_modules/bare-events": {
+      "version": "2.2.2",
+      "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.2.2.tgz",
+      "integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==",
+      "optional": true
+    },
     "node_modules/base64-js": {
       "version": "1.5.1",
       "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
@@ -601,16 +683,6 @@

         "node": "*"
       }
     },
-    "node_modules/bl": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
-      "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
-      "dependencies": {
-        "buffer": "^5.5.0",
-        "inherits": "^2.0.4",
-        "readable-stream": "^3.4.0"
-      }
-    },
     "node_modules/bottleneck": {
       "version": "2.19.5",
       "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz",
@@ -626,9 +698,9 @@

       }
     },
     "node_modules/buffer": {
-      "version": "5.7.1",
-      "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
-      "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
+      "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
       "funding": [
         {
           "type": "github",
@@ -645,15 +717,15 @@

       ],
       "dependencies": {
         "base64-js": "^1.3.1",
-        "ieee754": "^1.1.13"
+        "ieee754": "^1.2.1"
       }
     },
     "node_modules/buffer-crc32": {
-      "version": "0.2.13",
-      "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
-      "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz",
+      "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==",
       "engines": {
-        "node": "*"
+        "node": ">=8.0.0"
       }
     },
     "node_modules/buffers": {
@@ -684,6 +756,22 @@

         "node": "*"
       }
     },
+    "node_modules/color-convert": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+      "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+      "dependencies": {
+        "color-name": "~1.1.4"
+      },
+      "engines": {
+        "node": ">=7.0.0"
+      }
+    },
+    "node_modules/color-name": {
+      "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+      "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
+    },
     "node_modules/combined-stream": {
       "version": "1.0.8",
       "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
@@ -704,17 +792,18 @@

       }
     },
     "node_modules/compress-commons": {
-      "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-4.1.1.tgz",
-      "integrity": "sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ==",
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz",
+      "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==",
       "dependencies": {
-        "buffer-crc32": "^0.2.13",
-        "crc32-stream": "^4.0.2",
+        "crc-32": "^1.2.0",
+        "crc32-stream": "^6.0.0",
+        "is-stream": "^2.0.1",
         "normalize-path": "^3.0.0",
-        "readable-stream": "^3.6.0"
+        "readable-stream": "^4.0.0"
       },
       "engines": {
-        "node": ">= 10"
+        "node": ">= 14"
       }
     },
     "node_modules/concat-map": {
@@ -739,15 +828,28 @@

       }
     },
     "node_modules/crc32-stream": {
-      "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-4.0.2.tgz",
-      "integrity": "sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w==",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz",
+      "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==",
       "dependencies": {
         "crc-32": "^1.2.0",
-        "readable-stream": "^3.4.0"
+        "readable-stream": "^4.0.0"
       },
       "engines": {
-        "node": ">= 10"
+        "node": ">= 14"
       }
     },
+    "node_modules/cross-spawn": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+      "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+      "dependencies": {
+        "path-key": "^3.1.0",
+        "shebang-command": "^2.0.0",
+        "which": "^2.0.1"
+      },
+      "engines": {
+        "node": ">= 8"
+      }
+    },
     "node_modules/crypto": {
@@ -781,12 +883,22 @@

         "dot-object": "bin/dot-object"
       }
     },
-    "node_modules/end-of-stream": {
-      "version": "1.4.4",
-      "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
-      "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
-      "dependencies": {
-        "once": "^1.4.0"
+    "node_modules/eastasianwidth": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
+      "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
+    },
+    "node_modules/emoji-regex": {
+      "version": "9.2.2",
+      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
+      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="
+    },
+    "node_modules/event-target-shim": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
+      "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
+      "engines": {
+        "node": ">=6"
       }
     },
     "node_modules/events": {
@ -797,6 +909,26 @@
|
|||
"node": ">=0.8.x"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-fifo": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz",
|
||||
"integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="
|
||||
},
|
||||
"node_modules/foreground-child": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz",
|
||||
"integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==",
|
||||
"dependencies": {
|
||||
"cross-spawn": "^7.0.0",
|
||||
"signal-exit": "^4.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||
|
@ -810,11 +942,6 @@
|
|||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/fs-constants": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
|
||||
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
|
||||
},
|
||||
"node_modules/fs.realpath": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
|
@ -898,6 +1025,14 @@
|
|||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
|
||||
},
|
||||
"node_modules/is-fullwidth-code-point": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
||||
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-plain-object": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
|
||||
|
@ -906,11 +1041,44 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-stream": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/isarray": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
|
||||
"integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
|
||||
},
|
||||
"node_modules/isexe": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
|
||||
},
|
||||
"node_modules/jackspeak": {
|
||||
"version": "2.3.6",
|
||||
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz",
|
||||
"integrity": "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==",
|
||||
"dependencies": {
|
||||
"@isaacs/cliui": "^8.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@pkgjs/parseargs": "^0.11.0"
|
||||
}
|
||||
},
|
||||
"node_modules/jsonc-parser": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz",
|
||||
|
@ -965,31 +1133,6 @@
|
|||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
|
||||
},
|
||||
"node_modules/lodash.defaults": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="
|
||||
},
|
||||
"node_modules/lodash.difference": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz",
|
||||
"integrity": "sha512-dS2j+W26TQ7taQBGN8Lbbq04ssV3emRw4NY58WErlTO29pIqS0HmoT5aJ9+TUQ1N3G+JOZSji4eugsWwGp9yPA=="
|
||||
},
|
||||
"node_modules/lodash.flatten": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz",
|
||||
"integrity": "sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g=="
|
||||
},
|
||||
"node_modules/lodash.isplainobject": {
|
||||
"version": "4.0.6",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
|
||||
"integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="
|
||||
},
|
||||
"node_modules/lodash.union": {
|
||||
"version": "4.6.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz",
|
||||
"integrity": "sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw=="
|
||||
},
|
||||
"node_modules/lower-case": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
|
||||
|
@ -998,6 +1141,14 @@
|
|||
"tslib": "^2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/lru-cache": {
|
||||
"version": "10.2.0",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.0.tgz",
|
||||
"integrity": "sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==",
|
||||
"engines": {
|
||||
"node": "14 || >=16.14"
|
||||
}
|
||||
},
|
||||
"node_modules/lunr": {
|
||||
"version": "2.3.9",
|
||||
"resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz",
|
||||
|
@ -1054,6 +1205,14 @@
|
|||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/minipass": {
|
||||
"version": "7.0.4",
|
||||
"resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz",
|
||||
"integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==",
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/mkdirp": {
|
||||
"version": "0.5.6",
|
||||
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz",
|
||||
|
@ -1132,6 +1291,29 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/path-key": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
|
||||
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/path-scurry": {
|
||||
"version": "1.10.2",
|
||||
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.2.tgz",
|
||||
"integrity": "sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==",
|
||||
"dependencies": {
|
||||
"lru-cache": "^10.2.0",
|
||||
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/path-to-regexp": {
|
||||
"version": "6.2.1",
|
||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.1.tgz",
|
||||
|
@ -1164,17 +1346,24 @@
|
|||
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
|
||||
"integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
|
||||
},
|
||||
"node_modules/queue-tick": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz",
|
||||
"integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag=="
|
||||
},
|
||||
"node_modules/readable-stream": {
|
||||
"version": "3.6.2",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||
"version": "4.5.2",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz",
|
||||
"integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==",
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
"abort-controller": "^3.0.0",
|
||||
"buffer": "^6.0.3",
|
||||
"events": "^3.3.0",
|
||||
"process": "^0.11.10",
|
||||
"string_decoder": "^1.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/readdir-glob": {
|
||||
|
@ -1228,6 +1417,25 @@
|
|||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
|
||||
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
|
||||
},
|
||||
"node_modules/shebang-command": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
|
||||
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
|
||||
"dependencies": {
|
||||
"shebang-regex": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/shebang-regex": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
|
||||
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/shiki": {
|
||||
"version": "0.14.5",
|
||||
"resolved": "https://registry.npmjs.org/shiki/-/shiki-0.14.5.tgz",
|
||||
|
@ -1240,6 +1448,17 @@
|
|||
"vscode-textmate": "^8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/signal-exit": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
|
||||
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
|
@ -1249,6 +1468,18 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/streamx": {
|
||||
"version": "2.16.1",
|
||||
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz",
|
||||
"integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==",
|
||||
"dependencies": {
|
||||
"fast-fifo": "^1.1.0",
|
||||
"queue-tick": "^1.0.1"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"bare-events": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
|
@ -1257,19 +1488,102 @@
|
|||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tar-stream": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
|
||||
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
|
||||
"node_modules/string-width": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
|
||||
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
|
||||
"dependencies": {
|
||||
"bl": "^4.0.3",
|
||||
"end-of-stream": "^1.4.1",
|
||||
"fs-constants": "^1.0.0",
|
||||
"inherits": "^2.0.3",
|
||||
"readable-stream": "^3.1.1"
|
||||
"eastasianwidth": "^0.2.0",
|
||||
"emoji-regex": "^9.2.2",
|
||||
"strip-ansi": "^7.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/string-width-cjs": {
|
||||
"name": "string-width",
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||
"dependencies": {
|
||||
"emoji-regex": "^8.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0",
|
||||
"strip-ansi": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/string-width-cjs/node_modules/ansi-regex": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/string-width-cjs/node_modules/emoji-regex": {
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||
},
|
||||
"node_modules/string-width-cjs/node_modules/strip-ansi": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-ansi": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
|
||||
"integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-ansi-cjs": {
|
||||
"name": "strip-ansi",
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/tar-stream": {
|
||||
"version": "3.1.7",
|
||||
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz",
|
||||
"integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==",
|
||||
"dependencies": {
|
||||
"b4a": "^1.6.4",
|
||||
"fast-fifo": "^1.2.0",
|
||||
"streamx": "^2.15.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tr46": {
|
||||
|
@ -1424,9 +1738,9 @@
|
|||
"integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
|
||||
},
|
||||
"node_modules/unzip-stream": {
|
||||
"version": "0.3.1",
|
||||
"resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.1.tgz",
|
||||
"integrity": "sha512-RzaGXLNt+CW+T41h1zl6pGz3EaeVhYlK+rdAap+7DxW5kqsqePO8kRtWPaCiVqdhZc86EctSPVYNix30YOMzmw==",
|
||||
"version": "0.3.4",
|
||||
"resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz",
|
||||
"integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==",
|
||||
"dependencies": {
|
||||
"binary": "^0.3.0",
|
||||
"mkdirp": "^0.5.1"
|
||||
|
@ -1471,12 +1785,110 @@
|
|||
"webidl-conversions": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/which": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
|
||||
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
|
||||
"dependencies": {
|
||||
"isexe": "^2.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"node-which": "bin/node-which"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/wordwrap": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
|
||||
"integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/wrap-ansi": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
|
||||
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^6.1.0",
|
||||
"string-width": "^5.0.1",
|
||||
"strip-ansi": "^7.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs": {
|
||||
"name": "wrap-ansi",
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
||||
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^4.0.0",
|
||||
"string-width": "^4.1.0",
|
||||
"strip-ansi": "^6.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/string-width": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||
"dependencies": {
|
||||
"emoji-regex": "^8.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0",
|
||||
"strip-ansi": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
|
@ -1511,16 +1923,16 @@
|
|||
}
|
||||
},
|
||||
"node_modules/zip-stream": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-4.1.0.tgz",
|
||||
"integrity": "sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A==",
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz",
|
||||
"integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==",
|
||||
"dependencies": {
|
||||
"archiver-utils": "^2.1.0",
|
||||
"compress-commons": "^4.1.0",
|
||||
"readable-stream": "^3.6.0"
|
||||
"archiver-utils": "^5.0.0",
|
||||
"compress-commons": "^6.0.2",
|
||||
"readable-stream": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
"node": ">= 14"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,6 +1,6 @@
{
  "name": "@actions/artifact",
  "version": "2.1.0",
  "version": "2.1.9",
  "preview": true,
  "description": "Actions artifact lib",
  "keywords": [
@@ -49,7 +49,7 @@
    "@octokit/plugin-retry": "^3.0.9",
    "@octokit/request-error": "^5.0.0",
    "@protobuf-ts/plugin": "^2.2.3-alpha.1",
    "archiver": "^5.3.1",
    "archiver": "^7.0.1",
    "crypto": "^1.0.1",
    "jwt-decode": "^3.1.2",
    "twirp-ts": "^2.5.0",

@@ -102,7 +102,6 @@ class ArtifactHttpClient implements Rpc {
    } catch (error) {
      if (error instanceof SyntaxError) {
        debug(`Raw Body: ${rawBody}`)
        throw error
      }

      if (error instanceof UsageError) {

@@ -27,7 +27,13 @@ export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )
  return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')

  return !isGitHubHost && !isGheHost && !isLocalHost
}

export function getGitHubWorkspaceDir(): string {
@@ -51,3 +57,7 @@ export function getConcurrency(): number {
  const concurrency = 16 * numCPUs
  return concurrency > 300 ? 300 : concurrency
}

export function getUploadChunkTimeout(): number {
  return 300_000 // 5 minutes
}

@@ -1,7 +1,11 @@
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/core-http'
import {ZipUploadStream} from './zip'
import {getUploadChunkSize, getConcurrency} from '../shared/config'
import {
  getUploadChunkSize,
  getConcurrency,
  getUploadChunkTimeout
} from '../shared/config'
import * as core from '@actions/core'
import * as crypto from 'crypto'
import * as stream from 'stream'
@@ -24,6 +28,22 @@ export async function uploadZipToBlobStorage(
  zipUploadStream: ZipUploadStream
): Promise<BlobUploadResponse> {
  let uploadByteCount = 0
  let lastProgressTime = Date.now()
  const abortController = new AbortController()

  const chunkTimer = async (interval: number): Promise<void> =>
    new Promise((resolve, reject) => {
      const timer = setInterval(() => {
        if (Date.now() - lastProgressTime > interval) {
          reject(new Error('Upload progress stalled.'))
        }
      }, interval)

      abortController.signal.addEventListener('abort', () => {
        clearInterval(timer)
        resolve()
      })
    })

  const maxConcurrency = getConcurrency()
  const bufferSize = getUploadChunkSize()
@@ -37,11 +57,13 @@ export async function uploadZipToBlobStorage(
  const uploadCallback = (progress: TransferProgressEvent): void => {
    core.info(`Uploaded bytes ${progress.loadedBytes}`)
    uploadByteCount = progress.loadedBytes
    lastProgressTime = Date.now()
  }

  const options: BlockBlobUploadStreamOptions = {
    blobHTTPHeaders: {blobContentType: 'zip'},
    onProgress: uploadCallback
    onProgress: uploadCallback,
    abortSignal: abortController.signal
  }

  let sha256Hash: string | undefined = undefined
@@ -54,18 +76,22 @@ export async function uploadZipToBlobStorage(
  core.info('Beginning upload of artifact content to blob storage')

  try {
    await blockBlobClient.uploadStream(
      uploadStream,
      bufferSize,
      maxConcurrency,
      options
    )
    await Promise.race([
      blockBlobClient.uploadStream(
        uploadStream,
        bufferSize,
        maxConcurrency,
        options
      ),
      chunkTimer(getUploadChunkTimeout())
    ])
  } catch (error) {
    if (NetworkError.isNetworkErrorCode(error?.code)) {
      throw new NetworkError(error?.code)
    }

    throw error
  } finally {
    abortController.abort()
  }

  core.info('Finished uploading artifact content to blob storage!')
@@ -79,7 +105,6 @@ export async function uploadZipToBlobStorage(
      `No data was uploaded to blob storage. Reported upload byte count is 0.`
    )
  }

  return {
    uploadSize: uploadByteCount,
    sha256Hash

@@ -1,7 +1,6 @@
import * as stream from 'stream'
import * as archiver from 'archiver'
import * as core from '@actions/core'
import {createReadStream} from 'fs'
import {UploadZipSpecification} from './upload-zip-specification'
import {getUploadChunkSize} from '../shared/config'

@@ -44,7 +43,7 @@ export async function createZipUploadStream(
  for (const file of uploadSpecification) {
    if (file.sourcePath !== null) {
      // Add a normal file to the zip
      zip.append(createReadStream(file.sourcePath), {
      zip.file(file.sourcePath, {
        name: file.destinationPath
      })
    } else {

@@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright 2024 GitHub

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@@ -0,0 +1,183 @@
# `@actions/attest`

Functions for generating signed attestations for workflow artifacts.

Attestations bind some subject (a named artifact along with its digest) to a
predicate (some assertion about that subject) using the [in-toto
statement](https://github.com/in-toto/attestation/tree/main/spec/v1) format. A
signature is generated for the attestation using a
[Sigstore](https://www.sigstore.dev/)-issued signing certificate.

Once the attestation has been created and signed, it will be uploaded to the GH
attestations API and associated with the repository from which the workflow was
initiated.

See [Using artifact attestations to establish provenance for builds](https://docs.github.com/en/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)
for more information on artifact attestations.

## Usage

### `attest`

The `attest` function takes the supplied subject/predicate pair and generates a
signed attestation.

```js
const { attest } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  // In order to persist attestations to the repo, this should be a token with
  // repository write permissions.
  const ghToken = core.getInput('gh-token');

  const attestation = await attest({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    predicateType: 'https://in-toto.io/attestation/release',
    predicate: { . . . },
    token: ghToken
  });

  console.log(attestation);
}

run();
```

The `attest` function supports the following options:

```typescript
export type AttestOptions = {
  // The name of the subject to be attested.
  subjectName: string
  // The digest of the subject to be attested. Should be a map of digest
  // algorithms to their hex-encoded values.
  subjectDigest: Record<string, string>
  // URI identifying the content type of the predicate being attested.
  predicateType: string
  // Predicate to be attested.
  predicate: object
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: 'public-good' | 'github'
  // HTTP headers to include in request to attestations API.
  headers?: {[header: string]: string | number | undefined}
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
}
```

### `attestProvenance`

The `attestProvenance` function accepts the name and digest of some artifact and
generates a build provenance attestation over those values.

The attestation is formed by first generating a [SLSA provenance
predicate](https://slsa.dev/spec/v1.0/provenance) populated with
[metadata](https://github.com/slsa-framework/github-actions-buildtypes/tree/main/workflow/v1)
pulled from the GitHub Actions run.

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  // In order to persist attestations to the repo, this should be a token with
  // repository write permissions.
  const ghToken = core.getInput('gh-token');

  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: ghToken
  });

  console.log(attestation);
}

run();
```

The `attestProvenance` function supports the following options:

```typescript
export type AttestProvenanceOptions = {
  // The name of the subject to be attested.
  subjectName: string
  // The digest of the subject to be attested. Should be a map of digest
  // algorithms to their hex-encoded values.
  subjectDigest: Record<string, string>
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: 'public-good' | 'github'
  // HTTP headers to include in request to attestations API.
  headers?: {[header: string]: string | number | undefined}
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
  // Issuer URL responsible for minting the OIDC token from which the
  // provenance data is read. Defaults to
  // 'https://token.actions.githubusercontent.com'.
  issuer?: string
}
```

### `Attestation`

The `Attestation` returned by `attest`/`attestProvenance` has the following
fields:

```typescript
export type Attestation = {
  /*
   * JSON-serialized Sigstore bundle containing the provenance attestation,
   * signature, signing certificate and witnessed timestamp.
   */
  bundle: SerializedBundle
  /*
   * PEM-encoded signing certificate used to sign the attestation.
   */
  certificate: string
  /*
   * ID of Rekor transparency log entry created for the attestation (if
   * applicable).
   */
  tlogID?: string
  /*
   * ID of the persisted attestation (accessible via the GH API).
   */
  attestationID?: string
}
```

For details about the Sigstore bundle format, see the [Bundle protobuf
specification](https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto).

## Sigstore Instance

When generating the signed attestation there are two different Sigstore
instances which can be used to issue the signing certificate. By default,
workflows initiated from public repositories will use the Sigstore public-good
instance and persist the attestation signature to the public [Rekor transparency
log](https://docs.sigstore.dev/logging/overview/). Workflows initiated from
private/internal repositories will use the GitHub-internal Sigstore instance
which uses a signed timestamp issued by GitHub's timestamp authority in place of
the public transparency log.

The default Sigstore instance selection can be overridden by passing an explicit
value of either "public-good" or "github" for the `sigstore` option when calling
either `attest` or `attestProvenance`.

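For example, a workflow could force the public-good instance regardless of the default selection. A minimal sketch, reusing the placeholder artifact name and digest from the examples above:

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: core.getInput('gh-token'),
    // Explicitly select the Sigstore public-good instance; omit this option
    // to let the instance be inferred from the repository's visibility.
    sigstore: 'public-good'
  });

  // With the public-good instance, a Rekor transparency log entry is created
  // and its ID is surfaced on the returned attestation.
  console.log(attestation.tlogID);
}

run();
```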

## Storage

Attestations created by `attest`/`attestProvenance` will be uploaded to the GH
attestations API and associated with the appropriate repository. Attestation
storage is only supported for public repositories or repositories which belong
to a GitHub Enterprise Cloud account.

In order to generate attestations for private, non-Enterprise repositories, the
`skipWrite` option should be set to `true`.

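A minimal sketch of generating a signed provenance attestation for a private, non-Enterprise repository by skipping the API write (placeholder values as above):

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: core.getInput('gh-token'),
    // Sign the attestation but do not persist it to the GH attestations API.
    skipWrite: true
  });

  // Nothing was persisted, so no attestation ID is returned.
  console.log(attestation.attestationID); // undefined
}

run();
```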
@@ -0,0 +1,37 @@
# @actions/attest Releases

### 1.4.0

- Add new `headers` parameter to the `attest` and `attestProvenance` functions.
- Update `buildSLSAProvenancePredicate`/`attestProvenance` to automatically derive default OIDC issuer URL from current execution context.

### 1.3.1

- Fix bug with proxy support when retrieving JWKS for OIDC issuer

### 1.3.0

- Dynamic construction of Sigstore API URLs
- Switch to new GH provenance build type
- Fetch existing Rekor entry on 409 conflict error
- Bump @sigstore/bundle from 2.3.0 to 2.3.2
- Bump @sigstore/sign from 2.3.0 to 2.3.2

### 1.2.1

- Retry request on attestation persistence failure

### 1.2.0

- Generate attestations using the v0.3 Sigstore bundle format.
- Bump @sigstore/bundle from 2.2.0 to 2.3.0.
- Bump @sigstore/sign from 2.2.3 to 2.3.0.
- Remove dependency on make-fetch-happen

### 1.1.0

- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement.

### 1.0.0

- Initial release

@@ -0,0 +1,19 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`buildIntotoStatement returns an intoto statement 1`] = `
{
  "_type": "https://in-toto.io/Statement/v1",
  "predicate": {
    "key": "value",
  },
  "predicateType": "predicatey",
  "subject": [
    {
      "digest": {
        "sha256": "7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32",
      },
      "name": "subjecty",
    },
  ],
}
`;

@@ -0,0 +1,43 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
{
  "params": {
    "buildDefinition": {
      "buildType": "https://actions.github.io/buildtypes/workflow/v1",
      "externalParameters": {
        "workflow": {
          "path": ".github/workflows/main.yml",
          "ref": "main",
          "repository": "https://foo.ghe.com/owner/repo",
        },
      },
      "internalParameters": {
        "github": {
          "event_name": "push",
          "repository_id": "repo-id",
          "repository_owner_id": "owner-id",
          "runner_environment": "github-hosted",
        },
      },
      "resolvedDependencies": [
        {
          "digest": {
            "gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
          },
          "uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
        },
      ],
    },
    "runDetails": {
      "builder": {
        "id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
      },
      "metadata": {
        "invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
      },
    },
  },
  "type": "https://slsa.dev/provenance/v1",
}
`;

@@ -0,0 +1,41 @@
import {signingEndpoints} from '../src/endpoints'

describe('signingEndpoints', () => {
  const originalEnv = process.env

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when using github.com', () => {
    beforeEach(async () => {
      process.env = {
        ...originalEnv,
        GITHUB_SERVER_URL: 'https://github.com'
      }
    })

    it('returns expected endpoints', async () => {
      const endpoints = signingEndpoints('github')

      expect(endpoints.fulcioURL).toEqual('https://fulcio.githubapp.com')
      expect(endpoints.tsaServerURL).toEqual('https://timestamp.githubapp.com')
    })
  })

  describe('when using custom domain', () => {
    beforeEach(async () => {
      process.env = {
        ...originalEnv,
        GITHUB_SERVER_URL: 'https://foo.bar.com'
      }
    })

    it('returns expected endpoints', async () => {
      const endpoints = signingEndpoints('github')

      expect(endpoints.fulcioURL).toEqual('https://fulcio.foo.bar.com')
      expect(endpoints.tsaServerURL).toEqual('https://timestamp.foo.bar.com')
    })
  })
})

@@ -0,0 +1,6 @@
import {attest, attestProvenance} from '../src'

it('exports functions', () => {
  expect(attestProvenance).toBeInstanceOf(Function)
  expect(attest).toBeInstanceOf(Function)
})

@@ -0,0 +1,23 @@
import {buildIntotoStatement} from '../src/intoto'
import type {Predicate, Subject} from '../src/shared.types'

describe('buildIntotoStatement', () => {
  const subject: Subject = {
    name: 'subjecty',
    digest: {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    }
  }

  const predicate: Predicate = {
    type: 'predicatey',
    params: {
      key: 'value'
    }
  }

  it('returns an intoto statement', () => {
    const statement = buildIntotoStatement(subject, predicate)
    expect(statement).toMatchSnapshot()
  })
})

@@ -0,0 +1,148 @@
import * as jose from 'jose'
import nock from 'nock'
import {getIDTokenClaims} from '../src/oidc'

describe('getIDTokenClaims', () => {
  const originalEnv = process.env
  const issuer = 'https://example.com'
  const audience = 'nobody'
  const requestToken = 'token'
  const openidConfigPath = '/.well-known/openid-configuration'
  const jwksPath = '/.well-known/jwks.json'
  const tokenPath = '/token'
  const openIDConfig = {jwks_uri: `${issuer}${jwksPath}`}

  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  let key: any

  beforeEach(async () => {
    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: requestToken
    }

    // Generate JWT signing key
    key = await jose.generateKeyPair('PS256')

    // Create JWK and JWKS
    const jwk = await jose.exportJWK(key.publicKey)
    const jwks = {keys: [jwk]}

    nock(issuer).get(openidConfigPath).reply(200, openIDConfig)
    nock(issuer).get(jwksPath).reply(200, jwks)
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when ID token is valid', () => {
    const claims = {
      iss: issuer,
      aud: audience,
      ref: 'ref',
      sha: 'sha',
      repository: 'repo',
      event_name: 'push',
      job_workflow_ref: 'job_workflow_ref',
      workflow_ref: 'workflow',
      repository_id: '1',
      repository_owner_id: '1',
      runner_environment: 'github-hosted',
      run_id: '1',
      run_attempt: '1'
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('returns the ID token claims', async () => {
      const result = await getIDTokenClaims(issuer)
      expect(result).toEqual(claims)
    })
  })

  describe('when ID token is missing required claims', () => {
    const claims = {
      iss: issuer,
      aud: audience
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing claims/i)
    })
  })

  describe('when ID has the wrong issuer', () => {
    const claims = {foo: 'bar', iss: 'foo', aud: 'nobody'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/unexpected "iss"/)
    })
  })

  describe('when ID has the wrong audience', () => {
    const claims = {foo: 'bar', iss: issuer, aud: 'bar'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/unexpected "aud"/)
    })
  })

  describe('when openid config cannot be retrieved', () => {
    const claims = {foo: 'bar', iss: issuer, aud: 'nobody'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})

      // Disable the openid config endpoint
      nock.removeInterceptor({
        proto: 'https',
        hostname: 'example.com',
        port: '443',
        method: 'GET',
        path: openidConfigPath
      })
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(
        /failed to get id/i
      )
    })
  })
})

@@ -0,0 +1,248 @@
import * as github from '@actions/github'
import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import * as jose from 'jose'
import nock from 'nock'
import {MockAgent, setGlobalDispatcher} from 'undici'
import {SIGSTORE_PUBLIC_GOOD, signingEndpoints} from '../src/endpoints'
import {attestProvenance, buildSLSAProvenancePredicate} from '../src/provenance'

describe('provenance functions', () => {
  const originalEnv = process.env
  const issuer = 'https://token.actions.foo.ghe.com'
  const audience = 'nobody'
  const jwksPath = '/.well-known/jwks.json'
  const tokenPath = '/token'

  // MockAgent for mocking @actions/github
  const mockAgent = new MockAgent()
  setGlobalDispatcher(mockAgent)

  const claims = {
    iss: issuer,
    aud: 'nobody',
    repository: 'owner/repo',
    ref: 'refs/heads/main',
    sha: 'babca52ab0c93ae16539e5923cb0d7403b9a093b',
    job_workflow_ref: 'owner/workflows/.github/workflows/publish.yml@main',
    workflow_ref: 'owner/repo/.github/workflows/main.yml@main',
    event_name: 'push',
    repository_id: 'repo-id',
    repository_owner_id: 'owner-id',
    run_id: 'run-id',
    run_attempt: 'run-attempt',
    runner_environment: 'github-hosted'
  }

  beforeEach(async () => {
    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
      GITHUB_SERVER_URL: 'https://foo.ghe.com',
      GITHUB_REPOSITORY: claims.repository
    }

    // Generate JWT signing key
    const key = await jose.generateKeyPair('PS256')

    // Create JWK, JWKS, and JWT
    const jwk = await jose.exportJWK(key.publicKey)
    const jwks = {keys: [jwk]}
    const jwt = await new jose.SignJWT(claims)
      .setProtectedHeader({alg: 'PS256'})
      .sign(key.privateKey)

    // Mock OpenID configuration and JWKS endpoints
    nock(issuer)
      .get('/.well-known/openid-configuration')
      .reply(200, {jwks_uri: `${issuer}${jwksPath}`})
    nock(issuer).get(jwksPath).reply(200, jwks)

    // Mock OIDC token endpoint for populating the provenance
    nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('buildSLSAProvenancePredicate', () => {
    it('returns a provenance hydrated from an OIDC token', async () => {
      const predicate = await buildSLSAProvenancePredicate()
      expect(predicate).toMatchSnapshot()
    })
  })

  describe('attestProvenance', () => {
    // Subject to attest
    const subjectName = 'subjective'
    const subjectDigest = {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    }

    // Fake an OIDC token
    const oidcPayload = {sub: 'foo@bar.com', iss: ''}
    const oidcToken = `.${Buffer.from(JSON.stringify(oidcPayload)).toString(
      'base64'
    )}.}`

    const attestationID = '1234567890'

    beforeEach(async () => {
      nock(issuer)
        .get(tokenPath)
        .query({audience: 'sigstore'})
        .reply(200, {value: oidcToken})
    })

    describe('when using the github Sigstore instance', () => {
      beforeEach(async () => {
        const {fulcioURL, tsaServerURL} = signingEndpoints('github')

        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockTSA({baseURL: tsaServerURL})

        mockAgent
          .get('https://api.github.com')
          .intercept({
            path: /^\/repos\/.*\/.*\/attestations$/,
            method: 'post'
          })
          .reply(201, {id: attestationID})
      })

      describe('when the sigstore instance is explicitly set', () => {
        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token',
            sigstore: 'github'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeUndefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })

      describe('when the sigstore instance is inferred from the repo visibility', () => {
        const savedRepository = github.context.payload.repository

        beforeEach(() => {
          /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
          github.context.payload.repository = {visibility: 'private'} as any
        })

        afterEach(() => {
          github.context.payload.repository = savedRepository
        })

        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeUndefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })
    })

    describe('when using the public-good Sigstore instance', () => {
      const {fulcioURL, rekorURL} = SIGSTORE_PUBLIC_GOOD

      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockRekor({baseURL: rekorURL})

        // Mock GH attestations API
        mockAgent
          .get('https://api.github.com')
          .intercept({
            path: /^\/repos\/.*\/.*\/attestations$/,
            method: 'post'
          })
          .reply(201, {id: attestationID})
      })

      describe('when the sigstore instance is explicitly set', () => {
        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token',
            sigstore: 'public-good'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeDefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })

      describe('when the sigstore instance is inferred from the repo visibility', () => {
        const savedRepository = github.context.payload.repository

        beforeEach(() => {
          /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
          github.context.payload.repository = {visibility: 'public'} as any
        })

        afterEach(() => {
          github.context.payload.repository = savedRepository
        })

        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjectName,
            subjectDigest,
            token: 'token'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeDefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })
    })

    describe('when skipWrite is set to true', () => {
      const {fulcioURL, rekorURL} = SIGSTORE_PUBLIC_GOOD
      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockRekor({baseURL: rekorURL})
      })

      it('attests provenance', async () => {
        const attestation = await attestProvenance({
          subjectName,
          subjectDigest,
          token: 'token',
          sigstore: 'public-good',
          skipWrite: true
        })

        expect(attestation).toBeDefined()
        expect(attestation.bundle).toBeDefined()
        expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
        expect(attestation.tlogID).toBeDefined()
        expect(attestation.attestationID).toBeUndefined()
      })
    })
  })
})

@@ -0,0 +1,101 @@
import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import nock from 'nock'
import {Payload, signPayload} from '../src/sign'

describe('signProvenance', () => {
  const originalEnv = process.env

  // Fake an OIDC token
  const subject = 'foo@bar.com'
  const oidcPayload = {sub: subject, iss: ''}
  const oidcToken = `.${Buffer.from(JSON.stringify(oidcPayload)).toString(
    'base64'
  )}.}`

  // Dummy provenance to be signed
  const provenance = {
    _type: 'https://in-toto.io/Statement/v1',
    subject: {
      name: 'subjective',
      digest: {
        sha256:
          '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
      }
    }
  }

  const payload: Payload = {
    body: Buffer.from(JSON.stringify(provenance)),
    type: 'application/vnd.in-toto+json'
  }

  const fulcioURL = 'https://fulcio.url'
  const rekorURL = 'https://rekor.url'
  const tsaServerURL = 'https://tsa.url'

  beforeEach(() => {
    // Mock OIDC token endpoint
    const tokenURL = 'https://token.url'

    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: tokenURL,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token'
    }

    nock(tokenURL)
      .get('/')
      .query({audience: 'sigstore'})
      .reply(200, {value: oidcToken})
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when visibility is public', () => {
    beforeEach(async () => {
      await mockFulcio({baseURL: fulcioURL, strict: false})
      await mockRekor({baseURL: rekorURL})
    })

    it('returns a bundle', async () => {
      const att = await signPayload(payload, {fulcioURL, rekorURL})

      expect(att).toBeDefined()
      expect(att.mediaType).toEqual(
        'application/vnd.dev.sigstore.bundle.v0.3+json'
      )

      expect(att.content.$case).toEqual('dsseEnvelope')
      expect(att.verificationMaterial.content.$case).toEqual('certificate')
      expect(att.verificationMaterial.tlogEntries).toHaveLength(1)
      expect(
        att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
      ).toHaveLength(0)
    })
  })

  describe('when visibility is private', () => {
    beforeEach(async () => {
      await mockFulcio({baseURL: fulcioURL, strict: false})
      await mockTSA({baseURL: tsaServerURL})
    })

    it('returns a bundle', async () => {
      const att = await signPayload(payload, {fulcioURL, tsaServerURL})

      expect(att).toBeDefined()
      expect(att.mediaType).toEqual(
        'application/vnd.dev.sigstore.bundle.v0.3+json'
      )

      expect(att.content.$case).toEqual('dsseEnvelope')
      expect(att.verificationMaterial.content.$case).toEqual('certificate')
      expect(att.verificationMaterial.tlogEntries).toHaveLength(0)
      expect(
        att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
      ).toHaveLength(1)
    })
  })
})

@ -0,0 +1,93 @@
import {MockAgent, setGlobalDispatcher} from 'undici'
import {writeAttestation} from '../src/store'

describe('writeAttestation', () => {
  const originalEnv = process.env
  const attestation = {foo: 'bar '}
  const token = 'token'
  const headers = {'X-GitHub-Foo': 'true'}

  const mockAgent = new MockAgent()
  setGlobalDispatcher(mockAgent)

  beforeEach(() => {
    process.env = {
      ...originalEnv,
      GITHUB_REPOSITORY: 'foo/bar'
    }
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when the api call is successful', () => {
    beforeEach(() => {
      mockAgent
        .get('https://api.github.com')
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`, ...headers},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(201, {id: '123'})
    })

    it('persists the attestation', async () => {
      await expect(
        writeAttestation(attestation, token, {headers})
      ).resolves.toEqual('123')
    })
  })

  describe('when the api call fails', () => {
    beforeEach(() => {
      mockAgent
        .get('https://api.github.com')
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(500, 'oops')
    })

    it('throws an error', async () => {
      await expect(
        writeAttestation(attestation, token, {retry: 0})
      ).rejects.toThrow(/oops/)
    })
  })

  describe('when the api call fails but succeeds on retry', () => {
    beforeEach(() => {
      const pool = mockAgent.get('https://api.github.com')

      pool
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(500, 'oops')
        .times(1)

      pool
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(201, {id: '123'})
        .times(1)
    })

    it('persists the attestation', async () => {
      await expect(writeAttestation(attestation, token)).resolves.toEqual(
        '123'
      )
    })
  })
})

File diff suppressed because it is too large
@ -0,0 +1,58 @@
{
  "name": "@actions/attest",
  "version": "1.4.0",
  "description": "Actions attestation lib",
  "keywords": [
    "github",
    "actions",
    "attestation"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/main/packages/attest",
  "license": "MIT",
  "main": "lib/index.js",
  "types": "lib/index.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib"
  ],
  "publishConfig": {
    "access": "public",
    "provenance": true
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/attest"
  },
  "scripts": {
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "devDependencies": {
    "@sigstore/mock": "^0.7.4",
    "@sigstore/rekor-types": "^2.0.0",
    "@types/jsonwebtoken": "^9.0.6",
    "nock": "^13.5.1",
    "undici": "^5.28.4"
  },
  "dependencies": {
    "@actions/core": "^1.10.1",
    "@actions/github": "^6.0.0",
    "@actions/http-client": "^2.2.1",
    "@octokit/plugin-retry": "^6.0.1",
    "@sigstore/bundle": "^2.3.2",
    "@sigstore/sign": "^2.3.2",
    "jose": "^5.2.3"
  },
  "overrides": {
    "@octokit/plugin-retry": {
      "@octokit/core": "^5.2.0"
    }
  }
}
@ -0,0 +1,103 @@
import {bundleToJSON} from '@sigstore/bundle'
import {X509Certificate} from 'crypto'
import {SigstoreInstance, signingEndpoints} from './endpoints'
import {buildIntotoStatement} from './intoto'
import {Payload, signPayload} from './sign'
import {writeAttestation} from './store'

import type {Bundle} from '@sigstore/sign'
import type {Attestation, Predicate, Subject} from './shared.types'

const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'

/**
 * Options for attesting a subject / predicate.
 */
export type AttestOptions = {
  // The name of the subject to be attested.
  subjectName: string
  // The digest of the subject to be attested. Should be a map of digest
  // algorithms to their hex-encoded values.
  subjectDigest: Record<string, string>
  // Content type of the predicate being attested.
  predicateType: string
  // Predicate to be attested.
  predicate: object
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: SigstoreInstance
  // HTTP headers to include in request to attestations API.
  headers?: {[header: string]: string | number | undefined}
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
}

/**
 * Generates an attestation for the given subject and predicate. The subject
 * and predicate are combined into an in-toto statement, which is then signed
 * using the identified Sigstore instance and stored as an attestation.
 * @param options - The options for attestation.
 * @returns A promise that resolves to the attestation.
 */
export async function attest(options: AttestOptions): Promise<Attestation> {
  const subject: Subject = {
    name: options.subjectName,
    digest: options.subjectDigest
  }
  const predicate: Predicate = {
    type: options.predicateType,
    params: options.predicate
  }
  const statement = buildIntotoStatement(subject, predicate)

  // Sign the provenance statement
  const payload: Payload = {
    body: Buffer.from(JSON.stringify(statement)),
    type: INTOTO_PAYLOAD_TYPE
  }
  const endpoints = signingEndpoints(options.sigstore)
  const bundle = await signPayload(payload, endpoints)

  // Store the attestation
  let attestationID: string | undefined
  if (options.skipWrite !== true) {
    attestationID = await writeAttestation(
      bundleToJSON(bundle),
      options.token,
      {headers: options.headers}
    )
  }

  return toAttestation(bundle, attestationID)
}

function toAttestation(bundle: Bundle, attestationID?: string): Attestation {
  let certBytes: Buffer
  switch (bundle.verificationMaterial.content.$case) {
    case 'x509CertificateChain':
      certBytes =
        bundle.verificationMaterial.content.x509CertificateChain
          .certificates[0].rawBytes
      break
    case 'certificate':
      certBytes = bundle.verificationMaterial.content.certificate.rawBytes
      break
    default:
      throw new Error('Bundle must contain an x509 certificate')
  }

  const signingCert = new X509Certificate(certBytes)

  // Collect transparency log ID if available
  const tlogEntries = bundle.verificationMaterial.tlogEntries
  const tlogID = tlogEntries.length > 0 ? tlogEntries[0].logIndex : undefined

  return {
    bundle: bundleToJSON(bundle),
    certificate: signingCert.toString(),
    tlogID,
    attestationID
  }
}
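For context, a minimal sketch of how the `attest` function above might be called from an action step. This is illustrative only: the subject name, digest, and predicate type are placeholder values, and the call assumes it runs inside a workflow with an OIDC-capable job (required by the Fulcio signer).

```typescript
import * as core from '@actions/core'
import {attest} from '@actions/attest'

// Hypothetical usage: attest an arbitrary predicate for a built artifact.
async function run(): Promise<void> {
  const attestation = await attest({
    subjectName: 'my-org/my-artifact',
    subjectDigest: {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    },
    predicateType: 'https://example.com/my-predicate/v1',
    predicate: {builtBy: 'ci'},
    token: core.getInput('github-token')
  })

  // The returned attestation carries the Sigstore bundle and, unless
  // skipWrite was set, the ID assigned by the attestations API.
  core.info(`Attestation ID: ${attestation.attestationID}`)
}

run()
```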
@ -0,0 +1,55 @@
import * as github from '@actions/github'

const PUBLIC_GOOD_ID = 'public-good'
const GITHUB_ID = 'github'

const FULCIO_PUBLIC_GOOD_URL = 'https://fulcio.sigstore.dev'
const REKOR_PUBLIC_GOOD_URL = 'https://rekor.sigstore.dev'

export type SigstoreInstance = typeof PUBLIC_GOOD_ID | typeof GITHUB_ID

export type Endpoints = {
  fulcioURL: string
  rekorURL?: string
  tsaServerURL?: string
}

export const SIGSTORE_PUBLIC_GOOD: Endpoints = {
  fulcioURL: FULCIO_PUBLIC_GOOD_URL,
  rekorURL: REKOR_PUBLIC_GOOD_URL
}

export const signingEndpoints = (sigstore?: SigstoreInstance): Endpoints => {
  let instance: SigstoreInstance

  // An explicitly set instance type takes precedence, but if not set, use the
  // repository's visibility to determine the instance type.
  if (sigstore && [PUBLIC_GOOD_ID, GITHUB_ID].includes(sigstore)) {
    instance = sigstore
  } else {
    instance =
      github.context.payload.repository?.visibility === 'public'
        ? PUBLIC_GOOD_ID
        : GITHUB_ID
  }

  switch (instance) {
    case PUBLIC_GOOD_ID:
      return SIGSTORE_PUBLIC_GOOD
    case GITHUB_ID:
      return buildGitHubEndpoints()
  }
}

function buildGitHubEndpoints(): Endpoints {
  const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
  let host = new URL(serverURL).hostname

  if (host === 'github.com') {
    host = 'githubapp.com'
  }

  return {
    fulcioURL: `https://fulcio.${host}`,
    tsaServerURL: `https://timestamp.${host}`
  }
}
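For reference, a small sketch of what the selector above returns for each instance. The values follow directly from the constants in this module; the GitHub-instance URLs assume `GITHUB_SERVER_URL` is unset or `https://github.com`.

```typescript
import {signingEndpoints} from './endpoints'

// An explicitly requested instance wins over repository visibility.
const pub = signingEndpoints('public-good')
// pub = {fulcioURL: 'https://fulcio.sigstore.dev', rekorURL: 'https://rekor.sigstore.dev'}

// The GitHub instance is derived from GITHUB_SERVER_URL; on github.com this
// maps to the githubapp.com endpoints, with a TSA server instead of Rekor.
const gh = signingEndpoints('github')
// gh = {fulcioURL: 'https://fulcio.githubapp.com', tsaServerURL: 'https://timestamp.githubapp.com'}
```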
@ -0,0 +1,9 @@
export {AttestOptions, attest} from './attest'
export {
  AttestProvenanceOptions,
  attestProvenance,
  buildSLSAProvenancePredicate
} from './provenance'

export type {SerializedBundle} from '@sigstore/bundle'
export type {Attestation, Predicate, Subject} from './shared.types'
@ -0,0 +1,32 @@
import {Predicate, Subject} from './shared.types'

const INTOTO_STATEMENT_V1_TYPE = 'https://in-toto.io/Statement/v1'

/**
 * An in-toto statement.
 * https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md
 */
export type InTotoStatement = {
  _type: string
  subject: Subject[]
  predicateType: string
  predicate: object
}

/**
 * Assembles the given subject and predicate into an in-toto statement.
 * @param subject - The subject of the statement.
 * @param predicate - The predicate of the statement.
 * @returns The constructed in-toto statement.
 */
export const buildIntotoStatement = (
  subject: Subject,
  predicate: Predicate
): InTotoStatement => {
  return {
    _type: INTOTO_STATEMENT_V1_TYPE,
    subject: [subject],
    predicateType: predicate.type,
    predicate: predicate.params
  }
}
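A quick sketch of the statement shape `buildIntotoStatement` produces, using made-up subject and predicate values; note the single subject is wrapped in an array as the statement spec requires.

```typescript
import {buildIntotoStatement} from './intoto'

// Illustrative inputs only; the digest is a placeholder.
const statement = buildIntotoStatement(
  {name: 'my-artifact', digest: {sha256: 'abc123'}},
  {type: 'https://slsa.dev/provenance/v1', params: {buildType: 'example'}}
)

// statement = {
//   _type: 'https://in-toto.io/Statement/v1',
//   subject: [{name: 'my-artifact', digest: {sha256: 'abc123'}}],
//   predicateType: 'https://slsa.dev/provenance/v1',
//   predicate: {buildType: 'example'}
// }
```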
@ -0,0 +1,108 @@
import {getIDToken} from '@actions/core'
import {HttpClient} from '@actions/http-client'
import * as jose from 'jose'

const OIDC_AUDIENCE = 'nobody'

const VALID_SERVER_URLS = [
  'https://github.com',
  new RegExp('^https://[a-z0-9-]+\\.ghe\\.com$')
] as const

const REQUIRED_CLAIMS = [
  'iss',
  'ref',
  'sha',
  'repository',
  'event_name',
  'job_workflow_ref',
  'workflow_ref',
  'repository_id',
  'repository_owner_id',
  'runner_environment',
  'run_id',
  'run_attempt'
] as const

export type ClaimSet = {[K in (typeof REQUIRED_CLAIMS)[number]]: string}

type OIDCConfig = {
  jwks_uri: string
}

export const getIDTokenClaims = async (issuer?: string): Promise<ClaimSet> => {
  issuer = issuer || getIssuer()
  try {
    const token = await getIDToken(OIDC_AUDIENCE)
    const claims = await decodeOIDCToken(token, issuer)
    assertClaimSet(claims)
    return claims
  } catch (error) {
    throw new Error(`Failed to get ID token: ${error.message}`)
  }
}

const decodeOIDCToken = async (
  token: string,
  issuer: string
): Promise<jose.JWTPayload> => {
  // Verify and decode token
  const jwks = jose.createLocalJWKSet(await getJWKS(issuer))
  const {payload} = await jose.jwtVerify(token, jwks, {
    audience: OIDC_AUDIENCE,
    issuer
  })

  return payload
}

const getJWKS = async (issuer: string): Promise<jose.JSONWebKeySet> => {
  const client = new HttpClient('@actions/attest')
  const config = await client.getJson<OIDCConfig>(
    `${issuer}/.well-known/openid-configuration`
  )

  if (!config.result) {
    throw new Error('No OpenID configuration found')
  }

  const jwks = await client.getJson<jose.JSONWebKeySet>(config.result.jwks_uri)

  if (!jwks.result) {
    throw new Error('No JWKS found for issuer')
  }

  return jwks.result
}

function assertClaimSet(claims: jose.JWTPayload): asserts claims is ClaimSet {
  const missingClaims: string[] = []

  for (const claim of REQUIRED_CLAIMS) {
    if (!(claim in claims)) {
      missingClaims.push(claim)
    }
  }

  if (missingClaims.length > 0) {
    throw new Error(`Missing claims: ${missingClaims.join(', ')}`)
  }
}

// Derive the current OIDC issuer based on the server URL
function getIssuer(): string {
  const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'

  // Ensure the server URL is a valid GitHub server URL
  if (!VALID_SERVER_URLS.some(valid_url => serverURL.match(valid_url))) {
    throw new Error(`Invalid server URL: ${serverURL}`)
  }

  let host = new URL(serverURL).hostname

  if (host === 'github.com') {
    host = 'githubusercontent.com'
  }

  return `https://token.actions.${host}`
}
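A hedged sketch of how the helper above behaves at runtime. Inside a workflow job with `id-token: write` permission, it fetches the Actions OIDC token, verifies it against the issuer's JWKS, and asserts that every claim in `REQUIRED_CLAIMS` is present; the claim values shown in comments are purely illustrative.

```typescript
import {getIDTokenClaims} from './oidc'

// Assumes the ACTIONS_ID_TOKEN_REQUEST_* environment variables are present,
// as they are during a real workflow run.
async function demo(): Promise<void> {
  const claims = await getIDTokenClaims()
  console.log(claims.repository) // e.g. 'my-org/my-repo'
  console.log(claims.workflow_ref) // e.g. 'my-org/my-repo/.github/workflows/ci.yml@refs/heads/main'
}
```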
@ -0,0 +1,95 @@
import {attest, AttestOptions} from './attest'
import {getIDTokenClaims} from './oidc'
import type {Attestation, Predicate} from './shared.types'

const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1'
const GITHUB_BUILD_TYPE = 'https://actions.github.io/buildtypes/workflow/v1'

export type AttestProvenanceOptions = Omit<
  AttestOptions,
  'predicate' | 'predicateType'
> & {
  issuer?: string
}

/**
 * Builds an SLSA (Supply-chain Levels for Software Artifacts) provenance
 * predicate using the GitHub Actions Workflow build type.
 * https://slsa.dev/spec/v1.0/provenance
 * https://github.com/slsa-framework/github-actions-buildtypes/tree/main/workflow/v1
 * @param issuer - URL for the OIDC issuer. Defaults to the GitHub Actions
 * token issuer.
 * @returns The SLSA provenance predicate.
 */
export const buildSLSAProvenancePredicate = async (
  issuer?: string
): Promise<Predicate> => {
  const serverURL = process.env.GITHUB_SERVER_URL
  const claims = await getIDTokenClaims(issuer)

  // Split just the path and ref from the workflow string.
  // owner/repo/.github/workflows/main.yml@main =>
  // .github/workflows/main.yml, main
  const [workflowPath, workflowRef] = claims.workflow_ref
    .replace(`${claims.repository}/`, '')
    .split('@')

  return {
    type: SLSA_PREDICATE_V1_TYPE,
    params: {
      buildDefinition: {
        buildType: GITHUB_BUILD_TYPE,
        externalParameters: {
          workflow: {
            ref: workflowRef,
            repository: `${serverURL}/${claims.repository}`,
            path: workflowPath
          }
        },
        internalParameters: {
          github: {
            event_name: claims.event_name,
            repository_id: claims.repository_id,
            repository_owner_id: claims.repository_owner_id,
            runner_environment: claims.runner_environment
          }
        },
        resolvedDependencies: [
          {
            uri: `git+${serverURL}/${claims.repository}@${claims.ref}`,
            digest: {
              gitCommit: claims.sha
            }
          }
        ]
      },
      runDetails: {
        builder: {
          id: `${serverURL}/${claims.job_workflow_ref}`
        },
        metadata: {
          invocationId: `${serverURL}/${claims.repository}/actions/runs/${claims.run_id}/attempts/${claims.run_attempt}`
        }
      }
    }
  }
}

/**
 * Attests the build provenance of the provided subject. Generates the SLSA
 * build provenance predicate, assembles it into an in-toto statement, and
 * attests it.
 *
 * @param options - The options for attesting the provenance.
 * @returns A promise that resolves to the attestation.
 */
export async function attestProvenance(
  options: AttestProvenanceOptions
): Promise<Attestation> {
  const predicate = await buildSLSAProvenancePredicate(options.issuer)
  return attest({
    ...options,
    predicateType: predicate.type,
    predicate: predicate.params
  })
}
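As with `attest`, a minimal sketch of how `attestProvenance` might be used from a workflow step; the artifact name and digest are placeholders, and a real action would compute the digest from the built artifact.

```typescript
import * as core from '@actions/core'
import {attestProvenance} from '@actions/attest'

// Hypothetical action step: attest build provenance for a release binary.
async function run(): Promise<void> {
  const attestation = await attestProvenance({
    subjectName: 'my-org/my-repo/bin/app',
    subjectDigest: {sha256: '36ab4667'}, // placeholder value
    token: core.getInput('github-token'),
    sigstore: 'public-good' // omit to select based on repository visibility
  })

  core.info(`Created attestation ${attestation.attestationID}`)
}

run()
```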
@ -0,0 +1,52 @@
import type {SerializedBundle} from '@sigstore/bundle'

/*
 * The subject of an attestation.
 */
export type Subject = {
  /*
   * Name of the subject.
   */
  name: string
  /*
   * Digests of the subject. Should be a map of digest algorithms to their
   * hex-encoded values.
   */
  digest: Record<string, string>
}

/*
 * The predicate of an attestation.
 */
export type Predicate = {
  /*
   * URI identifying the content type of the predicate.
   */
  type: string
  /*
   * Predicate parameters.
   */
  params: object
}

/*
 * Artifact attestation.
 */
export type Attestation = {
  /*
   * Serialized Sigstore bundle containing the provenance attestation,
   * signature, signing certificate and witnessed timestamp.
   */
  bundle: SerializedBundle
  /*
   * PEM-encoded signing certificate used to sign the attestation.
   */
  certificate: string
  /*
   * ID of Rekor transparency log entry created for the attestation.
   */
  tlogID?: string
  /*
   * ID of the persisted attestation (accessible via the GH API).
   */
  attestationID?: string
}
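To make the type shapes concrete, a short sketch with illustrative values only: a subject naming an artifact by its SHA-256 digest, and a predicate wrapping an arbitrary JSON document.

```typescript
import type {Predicate, Subject} from './shared.types'

// Placeholder digest and URIs, for illustration.
const subject: Subject = {
  name: 'app-1.0.0.tgz',
  digest: {sha256: '7d070f6b'}
}

const predicate: Predicate = {
  type: 'https://example.com/build-metadata/v1',
  params: {compiler: 'tsc', mode: 'release'}
}
```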
@ -0,0 +1,110 @@
import {
  Bundle,
  BundleBuilder,
  CIContextProvider,
  DSSEBundleBuilder,
  FulcioSigner,
  RekorWitness,
  TSAWitness,
  Witness
} from '@sigstore/sign'

const OIDC_AUDIENCE = 'sigstore'
const DEFAULT_TIMEOUT = 10000
const DEFAULT_RETRIES = 3

/**
 * The payload to be signed (body) and its media type (type).
 */
export type Payload = {
  body: Buffer
  type: string
}

/**
 * Options for signing a document.
 */
export type SignOptions = {
  /**
   * The URL of the Fulcio service.
   */
  fulcioURL: string
  /**
   * The URL of the Rekor service.
   */
  rekorURL?: string
  /**
   * The URL of the TSA (Time Stamping Authority) server.
   */
  tsaServerURL?: string
  /**
   * The timeout duration in milliseconds when communicating with Sigstore
   * services.
   */
  timeout?: number
  /**
   * The number of retry attempts.
   */
  retry?: number
}

/**
 * Signs the provided payload with a Sigstore-issued certificate and returns
 * the signature bundle.
 * @param payload Payload to be signed.
 * @param options Signing options.
 * @returns A promise that resolves to the Sigstore signature bundle.
 */
export const signPayload = async (
  payload: Payload,
  options: SignOptions
): Promise<Bundle> => {
  const artifact = {
    data: payload.body,
    type: payload.type
  }

  // Sign the artifact and build the bundle
  return initBundleBuilder(options).create(artifact)
}

// Assembles the Sigstore bundle builder with the appropriate options
const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
  const identityProvider = new CIContextProvider(OIDC_AUDIENCE)
  const timeout = opts.timeout || DEFAULT_TIMEOUT
  const retry = opts.retry || DEFAULT_RETRIES
  const witnesses: Witness[] = []

  const signer = new FulcioSigner({
    identityProvider,
    fulcioBaseURL: opts.fulcioURL,
    timeout,
    retry
  })

  if (opts.rekorURL) {
    witnesses.push(
      new RekorWitness({
        rekorBaseURL: opts.rekorURL,
        entryType: 'dsse',
        fetchOnConflict: true,
        timeout,
        retry
      })
    )
  }

  if (opts.tsaServerURL) {
    witnesses.push(
      new TSAWitness({
        tsaBaseURL: opts.tsaServerURL,
        timeout,
        retry
      })
    )
  }

  // Build the bundle with the singleCertificate option which will
  // trigger the creation of v0.3 DSSE bundles
  return new DSSEBundleBuilder({signer, witnesses, singleCertificate: true})
}
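A sketch of calling `signPayload` directly against the public-good instance. The URLs match the defaults in `endpoints.ts`, the statement body is dummy data, and — like the tests above — a real run needs the Actions OIDC environment for `CIContextProvider` to obtain an identity token.

```typescript
import {Payload, signPayload} from './sign'

async function demo(): Promise<void> {
  const payload: Payload = {
    body: Buffer.from(
      JSON.stringify({_type: 'https://in-toto.io/Statement/v1'})
    ),
    type: 'application/vnd.in-toto+json'
  }

  const bundle = await signPayload(payload, {
    fulcioURL: 'https://fulcio.sigstore.dev',
    rekorURL: 'https://rekor.sigstore.dev'
  })

  // With a Rekor witness configured, the resulting v0.3 DSSE bundle carries
  // one signing certificate plus a transparency log entry.
  console.log(bundle.mediaType)
}
```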
@ -0,0 +1,44 @@
import * as github from '@actions/github'
import {retry} from '@octokit/plugin-retry'
import {RequestHeaders} from '@octokit/types'

const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
const DEFAULT_RETRY_COUNT = 5

export type WriteOptions = {
  retry?: number
  headers?: RequestHeaders
}

/**
 * Writes an attestation to the repository's attestations endpoint.
 * @param attestation - The attestation to write.
 * @param token - The GitHub token for authentication.
 * @returns The ID of the attestation.
 * @throws Error if the attestation fails to persist.
 */
export const writeAttestation = async (
  attestation: unknown,
  token: string,
  options: WriteOptions = {}
): Promise<string> => {
  const retries = options.retry ?? DEFAULT_RETRY_COUNT
  const octokit = github.getOctokit(token, {retry: {retries}}, retry)

  try {
    const response = await octokit.request(CREATE_ATTESTATION_REQUEST, {
      owner: github.context.repo.owner,
      repo: github.context.repo.repo,
      headers: options.headers,
      data: {bundle: attestation}
    })

    const data =
      typeof response.data === 'string'
        ? JSON.parse(response.data)
        : response.data
    return data?.id
  } catch (err) {
    const message = err instanceof Error ? err.message : err
    throw new Error(`Failed to persist attestation: ${message}`)
  }
}
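A sketch of calling the store helper above; `bundleJSON` and the token are placeholders supplied by the caller, and the API-version header is optional.

```typescript
import {writeAttestation} from './store'

// Persist a serialized Sigstore bundle for the current repository.
async function persist(bundleJSON: unknown, token: string): Promise<void> {
  const attestationID = await writeAttestation(bundleJSON, token, {
    retry: 3, // override the default of 5 retries
    headers: {'X-GitHub-Api-Version': '2022-11-28'}
  })
  console.log(`Stored attestation ${attestationID}`)
}
```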
@ -0,0 +1,12 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "baseUrl": "./",
    "outDir": "./lib",
    "declaration": true,
    "rootDir": "./src"
  },
  "include": [
    "./src"
  ]
}
@ -1,16 +1,154 @@
# @actions/cache Releases

### 3.2.4

- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts

### 3.2.3

- Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)

### 3.2.2

- Add new default cache download method to improve performance and reduce hangs [#1484](https://github.com/actions/toolkit/pull/1484)

### 3.2.1

- Updated @azure/storage-blob to `v12.13.0`

### 3.2.0

- Add `lookupOnly` to cache restore `DownloadOptions`.

### 3.1.4

- Fix zstd not being used due to `zstd --version` output change in zstd 1.5.4 release. See [#1353](https://github.com/actions/toolkit/pull/1353).

### 3.1.3

- Fix to prevent setting the MSYS environment variable globally [#1329](https://github.com/actions/toolkit/pull/1329).

### 3.1.2

- Fix issue with symlink restoration on windows.

### 3.1.1

- Reverted changes in 3.1.0 to fix issue with symlink restoration on windows.
- Added support for verbose logging about cache version during cache miss.

### 3.1.0

- Update actions/cache on windows to use gnu tar and zstd by default
- Update actions/cache on windows to fallback to bsdtar and zstd if gnu tar is not available.
- Added support for fallback to gzip to restore old caches on windows.

### 3.1.0-beta.3

- Bug fixes for fallback to gzip to restore old caches on windows and bsdtar if gnu tar is not available.

### 3.1.0-beta.2

- Added support for fallback to gzip to restore old caches on windows.

### 3.1.0-beta.1

- Update actions/cache on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984))

### 3.0.6

- Added `@azure/abort-controller` to dependencies to fix compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208)

### 3.0.5

- Update `@actions/cache` to use `@actions/core@^1.10.0`

### 3.0.4

- Fix zstd not working for windows on gnu tar in issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
- Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes.

### 3.0.3

- Bug fixes for download stuck issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.2

- Added 1 hour timeout for the download stuck issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.1

- Fix [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory.
- Fix [#809](https://github.com/actions/cache/issues/809) `zstd -d: no such file or directory` error on AWS self-hosted runners.

### 3.0.0

- Updated actions/cache to suppress Actions cache server errors and log warnings for those errors [#1122](https://github.com/actions/toolkit/pull/1122)

### 2.0.6

- Fix `Tar failed with error: The process '/usr/bin/tar' failed with exit code 1` issue when the temp directory where the tar is created is a subdirectory of the path being cached. ([issue](https://github.com/actions/cache/issues/689))

### 2.0.5

- Fix to avoid saving empty cache when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))

### 2.0.4

- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

### 2.0.3

- Update to v2.0.0 of `@actions/http-client`

### 2.0.0

- Added support to check if the Actions cache service feature is available or not [#1028](https://github.com/actions/toolkit/pull/1028)

### 1.0.11

- Fix file downloads > 2GB ([issue](https://github.com/actions/cache/issues/773))

### 1.0.10

- Update `lockfileVersion` to `v2` in `package-lock.json` [#1022](https://github.com/actions/toolkit/pull/1022)

### 1.0.9

- Use @azure/ms-rest-js v2.6.0
- Use @azure/storage-blob v12.8.0

### 1.0.8

- Increase the allowed artifact cache size from 5GB to 10GB ([issue](https://github.com/actions/cache/discussions/497))

### 1.0.7

- Fixes permissions issue extracting archives with GNU tar on macOS ([issue](https://github.com/actions/cache/issues/527))

### 1.0.6

- Make caching more verbose [#650](https://github.com/actions/toolkit/pull/650)
- Use GNU tar on macOS if available [#701](https://github.com/actions/toolkit/pull/701)

### 1.0.5

- Fix to ensure Windows cache paths get resolved correctly

### 1.0.4

- Use @actions/core v1.2.6
- Fixes uploadChunk to throw an error if any unsuccessful response code is received

### 1.0.3

- Use http-client v1.0.9
- Fixes error handling so retries are not attempted on non-retryable errors (409 Conflict, for example)
- Adds 5 second delay between retry attempts

### 1.0.2

- Use posix archive format to add support for some tools

### 1.0.1

- Fix bug in downloading large files (> 2 GBs) with the Azure SDK

### 1.0.0
@ -19,152 +157,14 @@
- Includes changes that break compatibility with earlier versions, including:
  - `retry`, `retryTypedResponse`, and `retryHttpClientResponse` moved from `cacheHttpClient` to `requestUtils`

### 0.2.1

- Fix to await async function getCompressionMethod

### 0.2.0

- Fixes issues with the zstd compression algorithm on Windows and Ubuntu 16.04 [#469](https://github.com/actions/toolkit/pull/469)

### 0.1.0

- Initial release
@ -5,6 +5,12 @@ import {DownloadOptions, getDownloadOptions} from '../src/options'

jest.mock('../src/internal/downloadUtils')

test('getCacheVersion does not mutate arguments', async () => {
  const paths = ['node_modules']
  getCacheVersion(paths, undefined, true)
  expect(paths).toEqual(['node_modules'])
})

test('getCacheVersion with one path returns version', async () => {
  const paths = ['node_modules']
  const result = getCacheVersion(paths, undefined, true)
@ -2,6 +2,10 @@ import {promises as fs} from 'fs'
import * as path from 'path'
import * as cacheUtils from '../src/internal/cacheUtils'

beforeEach(() => {
  jest.resetModules()
})

test('getArchiveFileSizeInBytes returns file size', () => {
  const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')
@ -38,3 +42,23 @@ test('resolvePaths works on github workspace directory', async () => {
  const paths = await cacheUtils.resolvePaths([workspace])
  expect(paths.length).toBeGreaterThan(0)
})

test('isGhes returns false for github.com', async () => {
  process.env.GITHUB_SERVER_URL = 'https://github.com'
  expect(cacheUtils.isGhes()).toBe(false)
})

test('isGhes returns false for ghe.com', async () => {
  process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
  expect(cacheUtils.isGhes()).toBe(false)
})

test('isGhes returns true for enterprise URL', async () => {
  process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
  expect(cacheUtils.isGhes()).toBe(true)
})

test('isGhes returns false for ghe.localhost', () => {
  process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
  expect(cacheUtils.isGhes()).toBe(false)
})
@ -1,12 +1,12 @@
{
  "name": "@actions/cache",
  "version": "3.2.2",
  "version": "3.2.4",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "@actions/cache",
      "version": "3.2.2",
      "version": "3.2.4",
      "license": "MIT",
      "dependencies": {
        "@actions/core": "^1.10.0",
@ -1,6 +1,6 @@
{
  "name": "@actions/cache",
  "version": "3.2.3",
  "version": "3.2.4",
  "preview": true,
  "description": "Actions cache lib",
  "keywords": [
@ -135,5 +135,11 @@ export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )
  return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost =
    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')

  return !isGitHubHost && !isGheHost
}
@ -1,5 +1,8 @@
# @actions/glob Releases

### 0.5.0
- Added `excludeHiddenFiles` option, which is disabled by default to preserve existing behavior [#1791: Add glob option to ignore hidden files](https://github.com/actions/toolkit/pull/1791)

### 0.4.0
- Pass in the current workspace as a parameter to HashFiles [#1318](https://github.com/actions/toolkit/pull/1318)
@ -708,7 +708,7 @@ describe('globber', () => {
    expect(itemPaths).toEqual([])
  })

  it('returns hidden files', async () => {
  it('returns hidden files by default', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/.emptyFolder
@ -734,6 +734,26 @@ describe('globber', () => {
    ])
  })

  it('ignores hidden files when excludeHiddenFiles is set', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/.emptyFolder
    //   <root>/.file
    //   <root>/.folder
    //   <root>/.folder/file
    const root = path.join(getTestTemp(), 'ignores-hidden-files')
    await createHiddenDirectory(path.join(root, '.emptyFolder'))
    await createHiddenDirectory(path.join(root, '.folder'))
    await createHiddenFile(path.join(root, '.file'), 'test .file content')
    await fs.writeFile(
      path.join(root, '.folder', 'file'),
      'test .folder/file content'
    )

    const itemPaths = await glob(root, {excludeHiddenFiles: true})
    expect(itemPaths).toEqual([root])
  })

  it('returns normalized paths', async () => {
    // Create the following layout:
    //   <root>/hello/world.txt
@ -1,6 +1,6 @@
{
  "name": "@actions/glob",
  "version": "0.4.0",
  "version": "0.5.0",
  "lockfileVersion": 3,
  "requires": true,
  "description": "Actions glob lib",
@ -1,6 +1,6 @@
{
  "name": "@actions/glob",
  "version": "0.4.0",
  "version": "0.5.0",
  "preview": true,
  "description": "Actions glob lib",
  "keywords": [
@ -9,7 +9,8 @@ export function getOptions(copy?: GlobOptions): GlobOptions {
    followSymbolicLinks: true,
    implicitDescendants: true,
    matchDirectories: true,
    omitBrokenSymbolicLinks: true
    omitBrokenSymbolicLinks: true,
    excludeHiddenFiles: false
  }

  if (copy) {

@ -32,6 +33,11 @@ export function getOptions(copy?: GlobOptions): GlobOptions {
      result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks
      core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`)
    }

    if (typeof copy.excludeHiddenFiles === 'boolean') {
      result.excludeHiddenFiles = copy.excludeHiddenFiles
      core.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`)
    }
  }

  return result
@ -36,4 +36,13 @@ export interface GlobOptions {
   * @default true
   */
  omitBrokenSymbolicLinks?: boolean

  /**
   * Indicates whether to exclude hidden files (files and directories starting with a `.`).
   * This does not apply to Windows files and directories with the hidden attribute unless
   * they are also prefixed with a `.`.
   *
   * @default false
   */
  excludeHiddenFiles?: boolean
}
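A small usage sketch for the new option documented above, assuming the public `create` entry point of `@actions/glob`; the pattern is a placeholder.

```typescript
import {create} from '@actions/glob'

// Hidden (dot-prefixed) files and directories are skipped; Windows
// hidden-attribute files are still returned unless they also start with '.'.
async function listVisible(): Promise<string[]> {
  const globber = await create('**', {excludeHiddenFiles: true})
  return globber.glob()
}
```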
@ -128,6 +128,11 @@ export class DefaultGlobber implements Globber {
        continue
      }

      // Hidden file or directory?
      if (options.excludeHiddenFiles && path.basename(item.path).match(/^\./)) {
        continue
      }

      // Directory
      if (stats.isDirectory()) {
        // Matched
@ -1,5 +1,11 @@
## Releases

## 2.2.2
- Better handling of url encoded usernames and passwords in proxy config [#1782](https://github.com/actions/toolkit/pull/1782)

## 2.2.1
- Make sure RequestOptions.keepAlive is applied properly on node20 runtime [#1572](https://github.com/actions/toolkit/pull/1572)

## 2.2.0
- Add function to return proxy agent dispatcher for compatibility with latest octokit packages [#1547](https://github.com/actions/toolkit/pull/1547)
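A sketch of the 2.2.2 proxy fix noted above, mirroring the test added later in this diff: percent-encoded credentials in the proxy URL are decoded before being used for proxy authentication. The proxy address is a placeholder.

```typescript
import * as httpm from '@actions/http-client'

// Encoded '@' characters in the username and password are decoded.
process.env['https_proxy'] = 'http://user%40github.com:p%40ssword@127.0.0.1:8080'

const client = new httpm.HttpClient()
const agent: any = client.getAgent('https://example.com')
// agent.proxyOptions.proxyAuth === 'user@github.com:p@ssword'
```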
@ -37,7 +37,7 @@ describe('basics', () => {
  //   "user-agent": "typed-test-client-tests"
  // },
  // "origin": "173.95.152.44",
  // "url": "https://postman-echo.com/get"
  // "url": "http://postman-echo.com/get"
  // }

  it('does basic http get request', async () => {

@ -63,16 +63,17 @@ describe('basics', () => {
    expect(obj.headers['user-agent']).toBeFalsy()
  })

  /* TODO write a mock rather than relying on a third party
  it('does basic https get request', async () => {
    const res: httpm.HttpClientResponse = await _http.get(
      'https://postman-echo.com/get'
      'http://postman-echo.com/get'
    )
    expect(res.message.statusCode).toBe(200)
    const body: string = await res.readBody()
    const obj = JSON.parse(body)
    expect(obj.url).toBe('https://postman-echo.com/get')
    expect(obj.url).toBe('http://postman-echo.com/get')
  })
  */
  it('does basic http get request with default headers', async () => {
    const http: httpm.HttpClient = new httpm.HttpClient(
      'http-client-tests',

@ -125,12 +126,12 @@ describe('basics', () => {
  it('pipes a get request', async () => {
    return new Promise<void>(async resolve => {
      const file = fs.createWriteStream(sampleFilePath)
      ;(await _http.get('https://postman-echo.com/get')).message
      ;(await _http.get('http://postman-echo.com/get')).message
        .pipe(file)
        .on('close', () => {
          const body: string = fs.readFileSync(sampleFilePath).toString()
          const obj = JSON.parse(body)
          expect(obj.url).toBe('https://postman-echo.com/get')
          expect(obj.url).toBe('http://postman-echo.com/get')
          resolve()
        })
    })

@ -138,32 +139,32 @@ describe('basics', () => {
  it('does basic get request with redirects', async () => {
    const res: httpm.HttpClientResponse = await _http.get(
      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'https://postman-echo.com/get'
      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'http://postman-echo.com/get'
      )}`
    )
    expect(res.message.statusCode).toBe(200)
    const body: string = await res.readBody()
    const obj = JSON.parse(body)
    expect(obj.url).toBe('https://postman-echo.com/get')
    expect(obj.url).toBe('http://postman-echo.com/get')
  })

  it('does basic get request with redirects (303)', async () => {
    const res: httpm.HttpClientResponse = await _http.get(
      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'https://postman-echo.com/get'
      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'http://postman-echo.com/get'
      )}&status_code=303`
    )
    expect(res.message.statusCode).toBe(200)
    const body: string = await res.readBody()
    const obj = JSON.parse(body)
    expect(obj.url).toBe('https://postman-echo.com/get')
    expect(obj.url).toBe('http://postman-echo.com/get')
  })

  it('returns 404 for not found get request on redirect', async () => {
    const res: httpm.HttpClientResponse = await _http.get(
      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'https://postman-echo.com/status/404'
      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'http://postman-echo.com/status/404'
      )}&status_code=303`
    )
    expect(res.message.statusCode).toBe(404)

@ -177,8 +178,8 @@ describe('basics', () => {
      {allowRedirects: false}
    )
    const res: httpm.HttpClientResponse = await http.get(
      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'https://postman-echo.com/get'
      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'http://postman-echo.com/get'
      )}`
    )
    expect(res.message.statusCode).toBe(302)

@ -191,8 +192,8 @@ describe('basics', () => {
      authorization: 'shhh'
    }
    const res: httpm.HttpClientResponse = await _http.get(
      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'https://www.postman-echo.com/get'
      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'http://www.postman-echo.com/get'
      )}`,
      headers
    )

@ -204,7 +205,7 @@ describe('basics', () => {
    expect(obj.headers[httpm.Headers.Accept]).toBe('application/json')
    expect(obj.headers['Authorization']).toBeUndefined()
    expect(obj.headers['authorization']).toBeUndefined()
    expect(obj.url).toBe('https://www.postman-echo.com/get')
    expect(obj.url).toBe('http://www.postman-echo.com/get')
  })

  it('does not pass Auth with diff hostname redirects', async () => {

@ -213,8 +214,8 @@ describe('basics', () => {
      Authorization: 'shhh'
    }
    const res: httpm.HttpClientResponse = await _http.get(
      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'https://www.postman-echo.com/get'
      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
        'http://www.postman-echo.com/get'
      )}`,
      headers
    )

@ -226,7 +227,7 @@ describe('basics', () => {
    expect(obj.headers[httpm.Headers.Accept]).toBe('application/json')
    expect(obj.headers['Authorization']).toBeUndefined()
    expect(obj.headers['authorization']).toBeUndefined()
    expect(obj.url).toBe('https://www.postman-echo.com/get')
    expect(obj.url).toBe('http://www.postman-echo.com/get')
  })

  it('does basic head request', async () => {

@ -289,11 +290,11 @@ describe('basics', () => {
  it('gets a json object', async () => {
    const jsonObj = await _http.getJson<HttpBinData>(
      'https://postman-echo.com/get'
      'http://postman-echo.com/get'
    )
    expect(jsonObj.statusCode).toBe(200)
    expect(jsonObj.result).toBeDefined()
    expect(jsonObj.result?.url).toBe('https://postman-echo.com/get')
    expect(jsonObj.result?.url).toBe('http://postman-echo.com/get')
    expect(jsonObj.result?.headers[httpm.Headers.Accept]).toBe(
      httpm.MediaTypes.ApplicationJson
    )

@ -304,7 +305,7 @@ describe('basics', () => {
  it('getting a non existent json object returns null', async () => {
    const jsonObj = await _http.getJson<HttpBinData>(
      'https://postman-echo.com/status/404'
      'http://postman-echo.com/status/404'
    )
    expect(jsonObj.statusCode).toBe(404)
    expect(jsonObj.result).toBeNull()

@ -313,12 +314,12 @@ describe('basics', () => {
  it('posts a json object', async () => {
    const res = {name: 'foo'}
    const restRes = await _http.postJson<HttpBinData>(
      'https://postman-echo.com/post',
      'http://postman-echo.com/post',
      res
    )
    expect(restRes.statusCode).toBe(200)
    expect(restRes.result).toBeDefined()
    expect(restRes.result?.url).toBe('https://postman-echo.com/post')
    expect(restRes.result?.url).toBe('http://postman-echo.com/post')
    expect(restRes.result?.json.name).toBe('foo')
    expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
      httpm.MediaTypes.ApplicationJson

@ -334,12 +335,12 @@ describe('basics', () => {
  it('puts a json object', async () => {
    const res = {name: 'foo'}
    const restRes = await _http.putJson<HttpBinData>(
      'https://postman-echo.com/put',
      'http://postman-echo.com/put',
      res
    )
    expect(restRes.statusCode).toBe(200)
    expect(restRes.result).toBeDefined()
    expect(restRes.result?.url).toBe('https://postman-echo.com/put')
    expect(restRes.result?.url).toBe('http://postman-echo.com/put')
    expect(restRes.result?.json.name).toBe('foo')

    expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(

@ -356,12 +357,12 @@ describe('basics', () => {
  it('patch a json object', async () => {
    const res = {name: 'foo'}
    const restRes = await _http.patchJson<HttpBinData>(
      'https://postman-echo.com/patch',
      'http://postman-echo.com/patch',
      res
    )
    expect(restRes.statusCode).toBe(200)
    expect(restRes.result).toBeDefined()
    expect(restRes.result?.url).toBe('https://postman-echo.com/patch')
    expect(restRes.result?.url).toBe('http://postman-echo.com/patch')
    expect(restRes.result?.json.name).toBe('foo')
    expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
      httpm.MediaTypes.ApplicationJson
@ -12,7 +12,7 @@ describe('headers', () => {
  it('preserves existing headers on getJson', async () => {
    const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
    let jsonObj = await _http.getJson<any>(
      'https://postman-echo.com/get',
      'http://postman-echo.com/get',
      additionalHeaders
    )
    expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('foo')

@ -26,7 +26,7 @@ describe('headers', () => {
        [httpm.Headers.Accept]: 'baz'
      }
    }
    jsonObj = await httpWithHeaders.getJson<any>('https://postman-echo.com/get')
    jsonObj = await httpWithHeaders.getJson<any>('http://postman-echo.com/get')
    expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
    expect(jsonObj.headers[httpm.Headers.ContentType]).toContain(
      httpm.MediaTypes.ApplicationJson

@ -36,7 +36,7 @@ describe('headers', () => {
  it('preserves existing headers on postJson', async () => {
    const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
    let jsonObj = await _http.postJson<any>(
      'https://postman-echo.com/post',
      'http://postman-echo.com/post',
      {},
      additionalHeaders
    )

@ -52,7 +52,7 @@ describe('headers', () => {
      }
    }
    jsonObj = await httpWithHeaders.postJson<any>(
      'https://postman-echo.com/post',
      'http://postman-echo.com/post',
      {}
    )
    expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')

@ -64,7 +64,7 @@ describe('headers', () => {
  it('preserves existing headers on putJson', async () => {
    const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
    let jsonObj = await _http.putJson<any>(
      'https://postman-echo.com/put',
      'http://postman-echo.com/put',
      {},
      additionalHeaders
    )

@ -80,7 +80,7 @@ describe('headers', () => {
      }
    }
    jsonObj = await httpWithHeaders.putJson<any>(
      'https://postman-echo.com/put',
      'http://postman-echo.com/put',
      {}
    )
    expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')

@ -92,7 +92,7 @@ describe('headers', () => {
  it('preserves existing headers on patchJson', async () => {
    const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
    let jsonObj = await _http.patchJson<any>(
      'https://postman-echo.com/patch',
      'http://postman-echo.com/patch',
      {},
      additionalHeaders
    )

@ -108,7 +108,7 @@ describe('headers', () => {
      }
    }
    jsonObj = await httpWithHeaders.patchJson<any>(
      'https://postman-echo.com/patch',
      'http://postman-echo.com/patch',
      {}
    )
    expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
@@ -11,6 +11,12 @@ describe('basics', () => {
     _http.dispose()
   })

+  it.each([true, false])('creates Agent with keepAlive %s', keepAlive => {
+    const http = new httpm.HttpClient('http-client-tests', [], {keepAlive})
+    const agent = http.getAgent('http://postman-echo.com')
+    expect(agent).toHaveProperty('keepAlive', keepAlive)
+  })
+
   it('does basic http get request with keepAlive true', async () => {
     const res: httpm.HttpClientResponse = await _http.get(
       'http://postman-echo.com/get'
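This new parameterized test pins down the companion change to `HttpClient.getAgent` in `src/index.ts` further down: the client now always builds and caches its own `http(s).Agent`, merely forwarding the `keepAlive` flag to it. A hedged usage sketch (URL illustrative):

```typescript
import * as httpm from '@actions/http-client'

async function main(): Promise<void> {
  // keepAlive now only configures the agent; an agent is created either way.
  const client = new httpm.HttpClient('keepalive-demo', [], {keepAlive: true})

  // Sequential requests to the same host can reuse one socket.
  await client.get('http://postman-echo.com/get')
  await client.get('http://postman-echo.com/get')

  // A keep-alive agent holds sockets open, so release them when finished.
  client.dispose()
}

main().catch(console.error)
```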
@@ -222,30 +222,33 @@ describe('proxy', () => {
     expect(_proxyConnects).toHaveLength(0)
   })

+  // TODO mock this out so we don't rely on a third party
+  /*
   it('HttpClient does basic https get request through proxy', async () => {
     process.env['https_proxy'] = _proxyUrl
     const httpClient = new httpm.HttpClient()
     const res: httpm.HttpClientResponse = await httpClient.get(
-      'https://postman-echo.com/get'
+      'http://postman-echo.com/get'
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
     expect(_proxyConnects).toEqual(['postman-echo.com:443'])
   })
+  */

-  it('HttpClient does basic https get request when bypass proxy', async () => {
-    process.env['https_proxy'] = _proxyUrl
+  it('HttpClient does basic http get request when bypass proxy', async () => {
+    process.env['http_proxy'] = _proxyUrl
     process.env['no_proxy'] = 'postman-echo.com'
     const httpClient = new httpm.HttpClient()
     const res: httpm.HttpClientResponse = await httpClient.get(
-      'https://postman-echo.com/get'
+      'http://postman-echo.com/get'
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
     expect(_proxyConnects).toHaveLength(0)
   })

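With the third-party round trip commented out, the surviving test exercises only the bypass path: `http_proxy` is set but `no_proxy` names the target host, so the request must go direct and `_proxyConnects` stays empty. Roughly, the resolution it relies on looks like this (a sketch against the exported `getProxyUrl` helper shown near the end of this diff; the deep import path and addresses are assumptions):

```typescript
import {getProxyUrl} from '@actions/http-client/lib/proxy'

process.env['http_proxy'] = 'http://127.0.0.1:8080'
process.env['no_proxy'] = 'postman-echo.com'

// Host is on the no_proxy list: no proxy URL, so the request goes direct.
console.log(getProxyUrl(new URL('http://postman-echo.com/get'))) // undefined

// Other hosts resolve to the configured proxy.
console.log(getProxyUrl(new URL('http://example.com/'))?.href) // 'http://127.0.0.1:8080/'
```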
@@ -304,6 +307,18 @@ describe('proxy', () => {
     console.log(agent)
     expect(agent instanceof ProxyAgent).toBe(true)
   })
+
+  it('proxyAuth is set in tunnel agent when authentication is provided with URIencoding', async () => {
+    process.env['https_proxy'] =
+      'http://user%40github.com:p%40ssword@127.0.0.1:8080'
+    const httpClient = new httpm.HttpClient()
+    const agent: any = httpClient.getAgent('https://some-url')
+    // eslint-disable-next-line no-console
+    console.log(agent)
+    expect(agent.proxyOptions.host).toBe('127.0.0.1')
+    expect(agent.proxyOptions.port).toBe('8080')
+    expect(agent.proxyOptions.proxyAuth).toBe('user@github.com:p@ssword')
+  })
 })

 function _clearVars(): void {
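The new URI-encoding test works because Node's WHATWG `URL` keeps credentials percent-encoded, while the tunnel agent needs them decoded; that is exactly the gap the `DecodedURL` wrapper added at the bottom of this diff closes. To see the raw behavior (plain Node, illustrative credentials):

```typescript
// A plain WHATWG URL leaves userinfo percent-encoded...
const raw = new URL('http://user%40github.com:p%40ssword@127.0.0.1:8080')
console.log(raw.username, raw.password) // 'user%40github.com' 'p%40ssword'

// ...so proxy auth must be decoded before it is handed to the agent.
const proxyAuth = `${decodeURIComponent(raw.username)}:${decodeURIComponent(raw.password)}`
console.log(proxyAuth) // 'user@github.com:p@ssword'
```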
@@ -1,12 +1,12 @@
 {
   "name": "@actions/http-client",
-  "version": "2.2.0",
+  "version": "2.2.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/http-client",
-      "version": "2.2.0",
+      "version": "2.2.1",
       "license": "MIT",
       "dependencies": {
         "tunnel": "^0.0.6",
@@ -1,6 +1,6 @@
 {
   "name": "@actions/http-client",
-  "version": "2.2.0",
+  "version": "2.2.2",
   "description": "Actions Http Client",
   "keywords": [
     "github",
@@ -48,4 +48,4 @@
     "tunnel": "^0.0.6",
     "undici": "^5.25.4"
   }
-}
+}
@@ -649,7 +649,7 @@ export class HttpClient {
       agent = this._proxyAgent
     }

-    if (this._keepAlive && !useProxy) {
+    if (!useProxy) {
       agent = this._agent
     }

@@ -690,18 +690,13 @@ export class HttpClient {
       this._proxyAgent = agent
     }

-    // if reusing agent across request and tunneling agent isn't assigned create a new agent
-    if (this._keepAlive && !agent) {
+    // if tunneling agent isn't assigned create a new agent
+    if (!agent) {
       const options = {keepAlive: this._keepAlive, maxSockets}
       agent = usingSsl ? new https.Agent(options) : new http.Agent(options)
       this._agent = agent
     }

-    // if not using private agent and tunnel agent isn't setup then use global agent
-    if (!agent) {
-      agent = usingSsl ? https.globalAgent : http.globalAgent
-    }
-
     if (usingSsl && this._ignoreSslError) {
       // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
       // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
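Net effect of these two hunks: `keepAlive` no longer gates whether the client caches and reuses its own agent, and the fallback to Node's shared global agents is gone, so every non-proxied request now runs through a client-owned `Agent` built from the caller's `keepAlive` and `maxSockets`. A paraphrase of the resulting flow, not the library's exact code (names are illustrative):

```typescript
import * as http from 'http'
import * as https from 'https'

// Sketch of the non-proxied path only; proxy/tunnel agent setup is omitted.
function selectAgent(
  usingSsl: boolean,
  keepAlive: boolean,
  maxSockets: number,
  cache: {agent?: http.Agent}
): http.Agent {
  // Reuse the client-owned agent unconditionally; keepAlive no longer gates this.
  if (cache.agent) {
    return cache.agent
  }
  // No agent cached yet: always create one. keepAlive merely configures it, and
  // there is no fallback to http.globalAgent / https.globalAgent anymore.
  const options = {keepAlive, maxSockets}
  cache.agent = usingSsl ? new https.Agent(options) : new http.Agent(options)
  return cache.agent
}
```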
@@ -15,10 +15,10 @@ export function getProxyUrl(reqUrl: URL): URL | undefined {

   if (proxyVar) {
     try {
-      return new URL(proxyVar)
+      return new DecodedURL(proxyVar)
     } catch {
       if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-        return new URL(`http://${proxyVar}`)
+        return new DecodedURL(`http://${proxyVar}`)
     }
   } else {
     return undefined
@@ -87,3 +87,22 @@ function isLoopbackAddress(host: string): boolean {
     hostLower.startsWith('[0:0:0:0:0:0:0:1]')
   )
 }
+
+class DecodedURL extends URL {
+  private _decodedUsername: string
+  private _decodedPassword: string
+
+  constructor(url: string | URL, base?: string | URL) {
+    super(url, base)
+    this._decodedUsername = decodeURIComponent(super.username)
+    this._decodedPassword = decodeURIComponent(super.password)
+  }
+
+  get username(): string {
+    return this._decodedUsername
+  }
+
+  get password(): string {
+    return this._decodedPassword
+  }
+}
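`DecodedURL` is a thin subclass whose only job is to return decoded credentials from the `username`/`password` getters while behaving like an ordinary `URL` everywhere else. The class is module-private, so the snippet below assumes it is in scope (values illustrative):

```typescript
const encoded = 'http://user%40github.com:p%40ssword@127.0.0.1:8080'

const plain = new URL(encoded)
console.log(plain.username) // 'user%40github.com' (WHATWG getters stay encoded)

const decoded = new DecodedURL(encoded)
console.log(decoded.username) // 'user@github.com'
console.log(decoded.password) // 'p@ssword'
console.log(decoded.host) // '127.0.0.1:8080' (everything else inherited)
```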