
Merge branch 'main' into joshmgross/extend-node-test-coverage

pull/1843/head
Josh Gross 2024-11-07 16:03:03 -05:00 committed by GitHub
commit 51465dd530
23 changed files with 1005 additions and 791 deletions

View File

@@ -11,7 +11,7 @@ on:
 jobs:
   test:
-    runs-on: macos-latest
+    runs-on: macos-latest-large
     steps:
       - name: setup repo
@@ -48,7 +48,7 @@ jobs:
           path: packages/${{ github.event.inputs.package }}/*.tgz

   publish:
-    runs-on: macos-latest
+    runs-on: macos-latest-large
     needs: test
    environment: npm-publish
    permissions:

View File

@@ -16,7 +16,7 @@ jobs:
     strategy:
       matrix:
-        runs-on: [ubuntu-latest, macos-latest, windows-latest]
+        runs-on: [ubuntu-latest, macos-latest-large, windows-latest]
         # Node 18 is the current default Node version in hosted runners, so users may still use the toolkit with it when running tests (see https://github.com/actions/toolkit/issues/1841)
         # Node 20 is the currently support Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions

View File

@@ -1,5 +1,10 @@
 # @actions/artifact Releases

+### 2.1.11
+
+- Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)
+- Use native `crypto` [#1815](https://github.com/actions/toolkit/pull/1815)
+
 ### 2.1.10

 - Fixed a regression with symlinks not being automatically resolved [#1830](https://github.com/actions/toolkit/pull/1830)

View File

@@ -10,6 +10,7 @@ import {FilesNotFoundError} from '../src/internal/shared/errors'
 import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import * as fs from 'fs'
 import * as path from 'path'
+import unzip from 'unzip-stream'

 const uploadStreamMock = jest.fn()
 const blockBlobClientMock = jest.fn().mockImplementation(() => ({
@@ -31,9 +32,20 @@ const fixtures = {
     {name: 'file2.txt', content: 'test 2 file content'},
     {name: 'file3.txt', content: 'test 3 file content'},
     {
-      name: 'from_symlink.txt',
+      name: 'real.txt',
+      content: 'from a symlink'
+    },
+    {
+      name: 'relative.txt',
       content: 'from a symlink',
-      symlink: '../symlinked.txt'
+      symlink: 'real.txt',
+      relative: true
+    },
+    {
+      name: 'absolute.txt',
+      content: 'from a symlink',
+      symlink: 'real.txt',
+      relative: false
     }
   ],
   backendIDs: {
@@ -55,14 +67,17 @@ const fixtures = {
 describe('upload-artifact', () => {
   beforeAll(() => {
-    if (!fs.existsSync(fixtures.uploadDirectory)) {
-      fs.mkdirSync(fixtures.uploadDirectory, {recursive: true})
-    }
+    fs.mkdirSync(fixtures.uploadDirectory, {
+      recursive: true
+    })

     for (const file of fixtures.files) {
       if (file.symlink) {
-        const symlinkPath = path.join(fixtures.uploadDirectory, file.symlink)
-        fs.writeFileSync(symlinkPath, file.content)
+        let symlinkPath = file.symlink
+        if (!file.relative) {
+          symlinkPath = path.join(fixtures.uploadDirectory, file.symlink)
+        }
         if (!fs.existsSync(path.join(fixtures.uploadDirectory, file.name))) {
           fs.symlinkSync(
             symlinkPath,
@@ -227,6 +242,12 @@ describe('upload-artifact', () => {
       })
     )

+    let loadedBytes = 0
+    const uploadedZip = path.join(
+      fixtures.uploadDirectory,
+      '..',
+      'uploaded.zip'
+    )
     uploadStreamMock.mockImplementation(
       async (
         stream: NodeJS.ReadableStream,
@@ -234,19 +255,28 @@ describe('upload-artifact', () => {
         maxConcurrency?: number,
         options?: BlockBlobUploadStreamOptions
       ) => {
-        const {onProgress, abortSignal} = options || {}
+        const {onProgress} = options || {}
+        if (fs.existsSync(uploadedZip)) {
+          fs.unlinkSync(uploadedZip)
+        }
+        const uploadedZipStream = fs.createWriteStream(uploadedZip)

         onProgress?.({loadedBytes: 0})

-        return new Promise(resolve => {
-          const timerId = setTimeout(() => {
-            onProgress?.({loadedBytes: 256})
-            resolve({})
-          }, 1_000)
-          abortSignal?.addEventListener('abort', () => {
-            clearTimeout(timerId)
+        return new Promise((resolve, reject) => {
+          stream.on('data', chunk => {
+            loadedBytes += chunk.length
+            uploadedZipStream.write(chunk)
+            onProgress?.({loadedBytes})
+          })
+          stream.on('end', () => {
+            onProgress?.({loadedBytes})
+            uploadedZipStream.end()
             resolve({})
           })
+          stream.on('error', err => {
+            reject(err)
+          })
         })
       }
     )
@@ -260,7 +290,34 @@ describe('upload-artifact', () => {
     )

     expect(id).toBe(1)
-    expect(size).toBe(256)
+    expect(size).toBe(loadedBytes)
+
+    const extractedDirectory = path.join(
+      fixtures.uploadDirectory,
+      '..',
+      'extracted'
+    )
+    if (fs.existsSync(extractedDirectory)) {
+      fs.rmdirSync(extractedDirectory, {recursive: true})
+    }
+
+    const extract = new Promise((resolve, reject) => {
+      fs.createReadStream(uploadedZip)
+        .pipe(unzip.Extract({path: extractedDirectory}))
+        .on('close', () => {
+          resolve(true)
+        })
+        .on('error', err => {
+          reject(err)
+        })
+    })
+    await expect(extract).resolves.toBe(true)
+
+    for (const file of fixtures.files) {
+      const filePath = path.join(extractedDirectory, file.name)
+      expect(fs.existsSync(filePath)).toBe(true)
+      expect(fs.readFileSync(filePath, 'utf8')).toBe(file.content)
+    }
   })

   it('should throw an error uploading blob chunks get delayed', async () => {
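
For context on the fixture change above: the test now exercises both a relative and an absolute symlink target pointing at the same regular file. A minimal, standalone sketch of the distinction (not part of this commit; paths are hypothetical):

```typescript
import * as fs from 'fs'
import * as os from 'os'
import * as path from 'path'

// Standalone sketch: the two symlink styles the fixtures exercise.
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'artifact-fixtures-'))
fs.writeFileSync(path.join(dir, 'real.txt'), 'from a symlink')

// Relative symlink: the stored target is just 'real.txt', resolved against the link's directory.
fs.symlinkSync('real.txt', path.join(dir, 'relative.txt'))

// Absolute symlink: the stored target is the full path to the file.
fs.symlinkSync(path.join(dir, 'real.txt'), path.join(dir, 'absolute.txt'))

// Both resolve to the same file on disk.
console.log(fs.realpathSync(path.join(dir, 'relative.txt'))) // <dir>/real.txt
console.log(fs.realpathSync(path.join(dir, 'absolute.txt'))) // <dir>/real.txt
```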

View File

@@ -1,12 +1,12 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.10",
+  "version": "2.1.11",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/artifact",
-      "version": "2.1.10",
+      "version": "2.1.11",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
@@ -19,7 +19,6 @@
         "@octokit/request-error": "^5.0.0",
         "@protobuf-ts/plugin": "^2.2.3-alpha.1",
         "archiver": "^7.0.1",
-        "crypto": "^1.0.1",
         "jwt-decode": "^3.1.2",
         "twirp-ts": "^2.5.0",
         "unzip-stream": "^0.3.1"
@@ -852,12 +851,6 @@
         "node": ">= 8"
       }
     },
-    "node_modules/crypto": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz",
-      "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==",
-      "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in."
-    },
     "node_modules/delayed-stream": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.10",
+  "version": "2.1.11",
   "preview": true,
   "description": "Actions artifact lib",
   "keywords": [
@@ -50,7 +50,6 @@
     "@octokit/request-error": "^5.0.0",
     "@protobuf-ts/plugin": "^2.2.3-alpha.1",
     "archiver": "^7.0.1",
-    "crypto": "^1.0.1",
     "jwt-decode": "^3.1.2",
     "twirp-ts": "^2.5.0",
     "unzip-stream": "^0.3.1"

View File

@@ -1,5 +1,5 @@
 import * as stream from 'stream'
-import {readlink} from 'fs/promises'
+import {realpath} from 'fs/promises'
 import * as archiver from 'archiver'
 import * as core from '@actions/core'
 import {UploadZipSpecification} from './upload-zip-specification'
@@ -46,7 +46,7 @@ export async function createZipUploadStream(
     // Check if symlink and resolve the source path
     let sourcePath = file.sourcePath
     if (file.stats.isSymbolicLink()) {
-      sourcePath = await readlink(file.sourcePath)
+      sourcePath = await realpath(file.sourcePath)
     }

     // Add the file to the zip
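
The one-line swap above is the relative-symlink fix: `readlink` returns the link's stored target verbatim (possibly a relative path), while `realpath` returns a fully resolved absolute path that can safely be handed to the archiver. A small sketch of the difference, using temporary paths (not part of this commit):

```typescript
import {mkdtemp, readlink, realpath, symlink, writeFile} from 'fs/promises'
import {tmpdir} from 'os'
import {join} from 'path'

// Sketch: why realpath works where readlink did not for relative symlinks.
async function demo(): Promise<void> {
  const dir = await mkdtemp(join(tmpdir(), 'zip-symlink-'))
  await writeFile(join(dir, 'real.txt'), 'content')
  await symlink('real.txt', join(dir, 'link.txt')) // relative target

  console.log(await readlink(join(dir, 'link.txt'))) // 'real.txt' — relative, not usable as a source path on its own
  console.log(await realpath(join(dir, 'link.txt'))) // '<dir>/real.txt' — absolute, safe to stream into the zip
}

void demo()
```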

View File

@@ -32,8 +32,7 @@ async function run() {
     const ghToken = core.getInput('gh-token');

     const attestation = await attest({
-      subjectName: 'my-artifact-name',
-      subjectDigest: { 'sha256': '36ab4667...'},
+      subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}],
       predicateType: 'https://in-toto.io/attestation/release',
       predicate: { . . . },
       token: ghToken
@@ -49,11 +48,12 @@ The `attest` function supports the following options:

 ```typescript
 export type AttestOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
+  // Deprecated. Use 'subjects' instead.
+  subjectName?: string
+  // Deprecated. Use 'subjects' instead.
+  subjectDigest?: Record<string, string>
+  // Collection of subjects to be attested
+  subjects?: Subject[]
   // URI identifying the content type of the predicate being attested.
   predicateType: string
   // Predicate to be attested.
@@ -68,6 +68,13 @@ export type AttestOptions = {
   // Whether to skip writing the attestation to the GH attestations API.
   skipWrite?: boolean
 }
+
+export type Subject = {
+  // Name of the subject.
+  name: string
+  // Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
+  digest: Record<string, string>
+}
 ```

 ### `attestProvenance`
@@ -105,12 +112,13 @@ The `attestProvenance` function supports the following options:

 ```typescript
 export type AttestProvenanceOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
-  // GitHub token for writing attestations.
+  // Deprecated. Use 'subjects' instead.
+  subjectName?: string
+  // Deprecated. Use 'subjects' instead.
+  subjectDigest?: Record<string, string>
+  // Collection of subjects to be attested
+  subjects?: Subject[]
+  // URI identifying the content type of the predicate being attested.
   token: string
   // Sigstore instance to use for signing. Must be one of "public-good" or
   // "github".

View File

@@ -1,8 +1,17 @@
 # @actions/attest Releases

+### 1.5.0
+
+- Bump @actions/core from 1.10.1 to 1.11.1 [#1847](https://github.com/actions/toolkit/pull/1847)
+- Bump @sigstore/bundle from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
+- Bump @sigstore/sign from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
+- Support for generating multi-subject attestations [#1864](https://github.com/actions/toolkit/pull/1865)
+- Fix bug in `buildSLSAProvenancePredicate` related to `workflow_ref` OIDC token claims containing the "@" symbol in the tag name [#1863](https://github.com/actions/toolkit/pull/1863)
+
 ### 1.4.2

 - Fix bug in `buildSLSAProvenancePredicate`/`attestProvenance` when generating provenance statement for enterprise account using customized OIDC issuer value [#1823](https://github.com/actions/toolkit/pull/1823)

 ### 1.4.1

 - Bump @actions/http-client from 2.2.1 to 2.2.3 [#1805](https://github.com/actions/toolkit/pull/1805)

View File

@@ -1,5 +1,47 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

+exports[`provenance functions buildSLSAProvenancePredicate handle tags including "@" character 1`] = `
+{
+  "params": {
+    "buildDefinition": {
+      "buildType": "https://actions.github.io/buildtypes/workflow/v1",
+      "externalParameters": {
+        "workflow": {
+          "path": ".github/workflows/main.yml",
+          "ref": "foo@1.0.0",
+          "repository": "https://foo.ghe.com/owner/repo",
+        },
+      },
+      "internalParameters": {
+        "github": {
+          "event_name": "push",
+          "repository_id": "repo-id",
+          "repository_owner_id": "owner-id",
+          "runner_environment": "github-hosted",
+        },
+      },
+      "resolvedDependencies": [
+        {
+          "digest": {
+            "gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
+          },
+          "uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
+        },
+      ],
+    },
+    "runDetails": {
+      "builder": {
+        "id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
+      },
+      "metadata": {
+        "invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
+      },
+    },
+  },
+  "type": "https://slsa.dev/provenance/v1",
+}
+`;
+
 exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
 {
   "params": {

View File

@@ -0,0 +1,16 @@
+import {attest} from '../src/attest'
+
+describe('attest', () => {
+  describe('when no subject information is provided', () => {
+    it('throws an error', async () => {
+      const options = {
+        predicateType: 'foo',
+        predicate: {bar: 'baz'},
+        token: 'token'
+      }
+      expect(attest(options)).rejects.toThrowError(
+        'Must provide either subjectName and subjectDigest or subjects'
+      )
+    })
+  })
+})

View File

@@ -17,7 +17,7 @@ describe('buildIntotoStatement', () => {
   }

   it('returns an intoto statement', () => {
-    const statement = buildIntotoStatement(subject, predicate)
+    const statement = buildIntotoStatement([subject], predicate)
     expect(statement).toMatchSnapshot()
   })
 })

View File

@@ -33,15 +33,7 @@ describe('provenance functions', () => {
     runner_environment: 'github-hosted'
   }

-  beforeEach(async () => {
-    process.env = {
-      ...originalEnv,
-      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
-      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
-      GITHUB_SERVER_URL: 'https://foo.ghe.com',
-      GITHUB_REPOSITORY: claims.repository
-    }
-
+  const mockIssuer = async (claims: jose.JWTPayload): Promise<void> => {
     // Generate JWT signing key
     const key = await jose.generateKeyPair('PS256')
@@ -60,6 +52,18 @@ describe('provenance functions', () => {
     // Mock OIDC token endpoint for populating the provenance
     nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
+  }
+
+  beforeEach(async () => {
+    process.env = {
+      ...originalEnv,
+      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
+      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
+      GITHUB_SERVER_URL: 'https://foo.ghe.com',
+      GITHUB_REPOSITORY: claims.repository
+    }
+
+    await mockIssuer(claims)
   })

   afterEach(() => {
@@ -71,6 +75,16 @@ describe('provenance functions', () => {
       const predicate = await buildSLSAProvenancePredicate()
       expect(predicate).toMatchSnapshot()
     })
+
+    it('handle tags including "@" character', async () => {
+      nock.cleanAll()
+      await mockIssuer({
+        ...claims,
+        workflow_ref: 'owner/repo/.github/workflows/main.yml@foo@1.0.0'
+      })
+      const predicate = await buildSLSAProvenancePredicate()
+      expect(predicate).toMatchSnapshot()
+    })
   })

   describe('attestProvenance', () => {
@@ -115,8 +129,7 @@ describe('provenance functions', () => {
     describe('when the sigstore instance is explicitly set', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token',
           sigstore: 'github'
         })
@@ -143,8 +156,7 @@ describe('provenance functions', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token'
         })
@@ -178,8 +190,7 @@ describe('provenance functions', () => {
     describe('when the sigstore instance is explicitly set', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token',
           sigstore: 'public-good'
         })
@@ -206,8 +217,7 @@ describe('provenance functions', () => {
       it('attests provenance', async () => {
        const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token'
         })

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/attest",
-  "version": "1.4.2",
+  "version": "1.5.0",
   "description": "Actions attestation lib",
   "keywords": [
     "github",
@@ -35,19 +35,19 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "devDependencies": {
-    "@sigstore/mock": "^0.7.4",
-    "@sigstore/rekor-types": "^2.0.0",
+    "@sigstore/mock": "^0.8.0",
+    "@sigstore/rekor-types": "^3.0.0",
     "@types/jsonwebtoken": "^9.0.6",
     "nock": "^13.5.1",
     "undici": "^5.28.4"
   },
   "dependencies": {
-    "@actions/core": "^1.10.1",
+    "@actions/core": "^1.11.1",
     "@actions/github": "^6.0.0",
     "@actions/http-client": "^2.2.3",
     "@octokit/plugin-retry": "^6.0.1",
-    "@sigstore/bundle": "^2.3.2",
-    "@sigstore/sign": "^2.3.2",
+    "@sigstore/bundle": "^3.0.0",
+    "@sigstore/sign": "^3.0.0",
     "jose": "^5.2.3"
   },
   "overrides": {

View File

@@ -14,11 +14,16 @@ const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
  * Options for attesting a subject / predicate.
  */
 export type AttestOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
+  /**
+   * @deprecated Use `subjects` instead.
+   **/
+  subjectName?: string
+  /**
+   * @deprecated Use `subjects` instead.
+   **/
+  subjectDigest?: Record<string, string>
+  // Subjects to be attested.
+  subjects?: Subject[]
   // Content type of the predicate being attested.
   predicateType: string
   // Predicate to be attested.
@@ -42,15 +47,24 @@ export type AttestOptions = {
  * @returns A promise that resolves to the attestation.
  */
 export async function attest(options: AttestOptions): Promise<Attestation> {
-  const subject: Subject = {
-    name: options.subjectName,
-    digest: options.subjectDigest
+  let subjects: Subject[]
+
+  if (options.subjects) {
+    subjects = options.subjects
+  } else if (options.subjectName && options.subjectDigest) {
+    subjects = [{name: options.subjectName, digest: options.subjectDigest}]
+  } else {
+    throw new Error(
+      'Must provide either subjectName and subjectDigest or subjects'
+    )
   }

   const predicate: Predicate = {
     type: options.predicateType,
     params: options.predicate
   }
-  const statement = buildIntotoStatement(subject, predicate)
+
+  const statement = buildIntotoStatement(subjects, predicate)

   // Sign the provenance statement
   const payload: Payload = {

View File

@@ -20,12 +20,12 @@ export type InTotoStatement = {
  * @returns The constructed in-toto statement.
  */
 export const buildIntotoStatement = (
-  subject: Subject,
+  subjects: Subject[],
   predicate: Predicate
 ): InTotoStatement => {
   return {
     _type: INTOTO_STATEMENT_V1_TYPE,
-    subject: [subject],
+    subject: subjects,
     predicateType: predicate.type,
     predicate: predicate.params
   }
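
With the signature change above, the caller-supplied array is dropped straight into the statement's `subject` field. A sketch of the resulting in-toto v1 statement shape for two subjects (all values are placeholders, not taken from this commit):

```typescript
// Sketch of the statement produced for two subjects (placeholder values).
const statement = {
  _type: 'https://in-toto.io/Statement/v1', // the value of INTOTO_STATEMENT_V1_TYPE
  subject: [
    {name: 'pkg-a.tgz', digest: {sha256: 'aaaa...'}},
    {name: 'pkg-b.tgz', digest: {sha256: 'bbbb...'}}
  ],
  predicateType: 'https://in-toto.io/attestation/release',
  predicate: {purl: 'pkg:npm/pkg-a@1.2.3'}
}
```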

View File

@@ -30,9 +30,11 @@ export const buildSLSAProvenancePredicate = async (
   // Split just the path and ref from the workflow string.
   // owner/repo/.github/workflows/main.yml@main =>
   //   .github/workflows/main.yml, main
-  const [workflowPath, workflowRef] = claims.workflow_ref
+  const [workflowPath, ...workflowRefChunks] = claims.workflow_ref
     .replace(`${claims.repository}/`, '')
     .split('@')
+  // Handle case where tag contains `@` (e.g: when using changesets in a monorepo context)
+  const workflowRef = workflowRefChunks.join('@')

   return {
     type: SLSA_PREDICATE_V1_TYPE,
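
A worked example of the new rest-spread split, using the `workflow_ref` value from the test added above (a tag that itself contains an `@`):

```typescript
// Worked example of the split on workflow_ref (values taken from the test above).
const repository = 'owner/repo'
const workflow_ref = 'owner/repo/.github/workflows/main.yml@foo@1.0.0'

const [workflowPath, ...workflowRefChunks] = workflow_ref
  .replace(`${repository}/`, '')
  .split('@')
const workflowRef = workflowRefChunks.join('@')

console.log(workflowPath) // '.github/workflows/main.yml'
console.log(workflowRef)  // 'foo@1.0.0' (the old two-element destructuring kept only 'foo')
```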

View File

@@ -86,7 +86,6 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
     witnesses.push(
       new RekorWitness({
         rekorBaseURL: opts.rekorURL,
-        entryType: 'dsse',
         fetchOnConflict: true,
         timeout,
         retry
@@ -106,5 +105,5 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
   // Build the bundle with the singleCertificate option which will
   // trigger the creation of v0.3 DSSE bundles
-  return new DSSEBundleBuilder({signer, witnesses, singleCertificate: true})
+  return new DSSEBundleBuilder({signer, witnesses})
 }

View File

@@ -1,5 +1,9 @@
 # @actions/cache Releases

+### 3.3.0
+
+- Update `@actions/core` to `1.11.1`
+- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
+
 ### 3.2.4

 - Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts

packages/cache/package-lock.json
View File

@@ -1,15 +1,15 @@
 {
   "name": "@actions/cache",
-  "version": "3.2.4",
+  "version": "3.3.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/cache",
-      "version": "3.2.4",
+      "version": "3.3.0",
       "license": "MIT",
       "dependencies": {
-        "@actions/core": "^1.10.0",
+        "@actions/core": "^1.11.1",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
         "@actions/http-client": "^2.1.1",
@@ -25,20 +25,12 @@
       }
     },
     "node_modules/@actions/core": {
-      "version": "1.10.0",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-      "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dependencies": {
-        "@actions/http-client": "^2.0.1",
-        "uuid": "^8.3.2"
-      }
-    },
-    "node_modules/@actions/core/node_modules/uuid": {
-      "version": "8.3.2",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
-      "bin": {
-        "uuid": "dist/bin/uuid"
+        "@actions/exec": "^1.1.1",
+        "@actions/http-client": "^2.0.1"
       }
     },
     "node_modules/@actions/exec": {
@@ -515,19 +507,12 @@
     },
     "dependencies": {
       "@actions/core": {
-        "version": "1.10.0",
-        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-        "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+        "version": "1.11.1",
+        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+        "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
         "requires": {
-          "@actions/http-client": "^2.0.1",
-          "uuid": "^8.3.2"
-        },
-        "dependencies": {
-          "uuid": {
-            "version": "8.3.2",
-            "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-            "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
-          }
+          "@actions/exec": "^1.1.1",
+          "@actions/http-client": "^2.0.1"
         }
       },
       "@actions/exec": {

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/cache",
-  "version": "3.2.4",
+  "version": "3.3.0",
   "preview": true,
   "description": "Actions cache lib",
   "keywords": [
@@ -37,7 +37,7 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "dependencies": {
-    "@actions/core": "^1.10.0",
+    "@actions/core": "^1.11.1",
     "@actions/exec": "^1.0.1",
     "@actions/glob": "^0.1.0",
     "@actions/http-client": "^2.1.1",

View File

@@ -1,5 +1,8 @@
 # @actions/tool-cache Releases

+### Unreleased
+
+- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
+
 ### 2.0.1

 - Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)