
Merge branch 'main' into neo-cache-service

pull/1857/head
Bassem Dghaidi 2024-09-24 02:36:02 -07:00 committed by GitHub
commit 70e5684b1f
39 changed files with 811 additions and 1218 deletions


@@ -32,7 +32,7 @@ jobs:
run: npm run bootstrap
- name: audit tools (without allow-list)
run: npm audit --audit-level=moderate
run: npm audit --audit-level=moderate --omit dev
- name: audit packages
run: npm run audit-all


@@ -1,5 +1,7 @@
name: Publish NPM
run-name: Publish NPM - ${{ github.event.inputs.package }}
on:
workflow_dispatch:
inputs:

package-lock.json (generated)

@@ -6569,12 +6569,13 @@
}
},
"node_modules/axios": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"version": "1.7.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
"integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
"dev": true,
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.0",
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
@@ -6829,12 +6830,12 @@
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -8242,9 +8243,9 @@
}
},
"node_modules/ejs": {
"version": "3.1.9",
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz",
"integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==",
"version": "3.1.10",
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
"integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
"dev": true,
"dependencies": {
"jake": "^10.8.5"
@@ -9316,9 +9317,9 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"


@@ -1,5 +1,14 @@
# @actions/artifact Releases
### 2.1.9
- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)
### 2.1.8
- Allows `*.localhost` domains for hostname checks for local development.
### 2.1.7
- Update unzip-stream dependency and reverted to using `unzip.Extract()`
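For orientation, a minimal upload against the v2 client API looks like the sketch below (the artifact name and file paths are placeholders; `DefaultArtifactClient` and `uploadArtifact` are the package's documented exports). With the 2.1.9 fix, a stalled chunk now rejects with 'Upload progress stalled.' instead of hanging indefinitely:

    import {DefaultArtifactClient} from '@actions/artifact'

    async function run(): Promise<void> {
      const artifact = new DefaultArtifactClient()
      // Uploads two files rooted at ./dist as a single artifact
      const {id, size} = await artifact.uploadArtifact(
        'my-artifact',                // artifact name
        ['dist/a.txt', 'dist/b.txt'], // files to include
        'dist'                        // root directory
      )
      console.log(`created artifact ${id} (${size} bytes)`)
    }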


@@ -20,6 +20,11 @@ describe('isGhes', () => {
expect(config.isGhes()).toBe(false)
})
it('should return false when the request domain ends with .localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://github.localhost'
expect(config.isGhes()).toBe(false)
})
it('should return false when the request domain is specific to an enterprise', () => {
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
expect(config.isGhes()).toBe(true)


@@ -1,260 +1,137 @@
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'
import * as retention from '../src/internal/upload/retention'
import * as config from '../src/internal/shared/config'
import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'
import {FilesNotFoundError} from '../src/internal/shared/errors'
import {BlockBlobClient} from '@azure/storage-blob'
import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import * as fs from 'fs'
import * as path from 'path'
const uploadStreamMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
uploadStream: uploadStreamMock
}))
jest.mock('@azure/storage-blob', () => ({
BlobClient: jest.fn().mockImplementation(() => {
return {
getBlockBlobClient: blockBlobClientMock
}
})
}))
const fixtures = {
uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
files: [
['file1.txt', 'test 1 file content'],
['file2.txt', 'test 2 file content'],
['file3.txt', 'test 3 file content']
],
backendIDs: {
workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
},
runtimeToken: 'test-token',
resultsServiceURL: 'http://results.local',
inputs: {
artifactName: 'test-artifact',
files: [
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
rootDirectory: '/home/user/files/plz-upload'
}
}
describe('upload-artifact', () => {
beforeAll(() => {
if (!fs.existsSync(fixtures.uploadDirectory)) {
fs.mkdirSync(fixtures.uploadDirectory, {recursive: true})
}
for (const [file, content] of fixtures.files) {
fs.writeFileSync(path.join(fixtures.uploadDirectory, file), content)
}
})
beforeEach(() => {
noopLogs()
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIDs)
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue(
fixtures.files.map(file => ({
sourcePath: path.join(fixtures.uploadDirectory, file[0]),
destinationPath: file[0]
}))
)
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue(fixtures.resultsServiceURL)
})
afterEach(() => {
jest.restoreAllMocks()
})
it('should successfully upload an artifact', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.com'
})
)
jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
Promise.resolve({
uploadSize: 1234,
sha256Hash: 'test-sha256-hash'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
)
expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
})
it('should throw an error if the root directory is invalid', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockImplementation(() => {
throw new Error('Invalid root directory')
})
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
)
expect(uploadResp).rejects.toThrow('Invalid root directory')
})
it('should reject if there are no files to upload', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
it('should reject if there are no files to upload', async () => {
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockClear()
.mockReturnValue([])
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
})
it('should reject if no backend IDs are found', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
it('should reject if no backend IDs are found', async () => {
jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should return false if the creation request fails', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
it('should return false if the creation request fails', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should return false if blob storage upload is unsuccessful', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
it('should return false if blob storage upload is unsuccessful', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
@@ -267,57 +144,19 @@ describe('upload-artifact', () => {
.spyOn(blobUpload, 'uploadZipToBlobStorage')
.mockReturnValue(Promise.reject(new Error('boom')))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should reject if finalize artifact fails', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
it('should reject if finalize artifact fails', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
@@ -336,112 +175,113 @@ describe('upload-artifact', () => {
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should throw an error uploading blob chunks get delayed', async () => {
const mockDate = new Date('2020-01-01')
const dirPath = path.join(__dirname, `plz-upload`)
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, {recursive: true})
}
fs.writeFileSync(path.join(dirPath, 'file1.txt'), 'test file content')
fs.writeFileSync(path.join(dirPath, 'file2.txt'), 'test file content')
fs.writeFileSync(path.join(dirPath, 'file3.txt'), 'test file content')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: path.join(dirPath, 'file1.txt'),
destinationPath: 'file1.txt'
},
{
sourcePath: path.join(dirPath, 'file2.txt'),
destinationPath: 'file2.txt'
},
{
sourcePath: path.join(dirPath, 'file3.txt'),
destinationPath: 'dir/file3.txt'
}
])
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
it('should successfully upload an artifact', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.com'
signedUploadUrl: 'https://signed-upload-url.local'
})
)
jest
.spyOn(blobUpload, 'uploadZipToBlobStorage')
.mockReturnValue(Promise.reject(new Error('Upload progress stalled.')))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
BlockBlobClient.prototype.uploadStream = jest
.fn()
.mockImplementation(
async (stream, bufferSize, maxConcurrency, options) => {
return new Promise<void>(resolve => {
// Call the onProgress callback with a progress event
options.onProgress({loadedBytes: 0})
// Wait for 31 seconds before resolving the promise
setTimeout(() => {
// Call the onProgress callback again to simulate progress
options.onProgress({loadedBytes: 100})
resolve()
}, 31000) // Delay longer than your timeout
})
}
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
artifactId: '1'
})
)
jest.mock('fs')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
uploadStreamMock.mockImplementation(
async (
stream: NodeJS.ReadableStream,
bufferSize?: number,
maxConcurrency?: number,
options?: BlockBlobUploadStreamOptions
) => {
const {onProgress, abortSignal} = options || {}
onProgress?.({loadedBytes: 0})
return new Promise(resolve => {
const timerId = setTimeout(() => {
onProgress?.({loadedBytes: 256})
resolve({})
}, 1_000)
abortSignal?.addEventListener('abort', () => {
clearTimeout(timerId)
resolve({})
})
})
}
)
expect(uploadResp).rejects.toThrow('Upload progress stalled.')
const {id, size} = await uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(id).toBe(1)
expect(size).toBe(256)
})
it('should throw an error uploading blob chunks get delayed', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.local'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
artifactId: '1'
})
)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)
uploadStreamMock.mockImplementation(
async (
stream: NodeJS.ReadableStream,
bufferSize?: number,
maxConcurrency?: number,
options?: BlockBlobUploadStreamOptions
) => {
const {onProgress, abortSignal} = options || {}
onProgress?.({loadedBytes: 0})
return new Promise(resolve => {
abortSignal?.addEventListener('abort', () => {
resolve({})
})
})
}
)
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
})
})


@@ -1,12 +1,12 @@
{
"name": "@actions/artifact",
"version": "2.1.7",
"version": "2.1.9",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@actions/artifact",
"version": "2.1.7",
"version": "2.1.9",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.0",


@@ -1,6 +1,6 @@
{
"name": "@actions/artifact",
"version": "2.1.7",
"version": "2.1.9",
"preview": true,
"description": "Actions artifact lib",
"keywords": [


@@ -30,10 +30,10 @@ export function isGhes(): boolean {
const hostname = ghUrl.hostname.trimEnd().toUpperCase()
const isGitHubHost = hostname === 'GITHUB.COM'
const isGheHost =
hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
const isGheHost = hostname.endsWith('.GHE.COM')
const isLocalHost = hostname.endsWith('.LOCALHOST')
return !isGitHubHost && !isGheHost
return !isGitHubHost && !isGheHost && !isLocalHost
}
export function getGitHubWorkspaceDir(): string {
@@ -57,3 +57,7 @@ export function getConcurrency(): number {
const concurrency = 16 * numCPUs
return concurrency > 300 ? 300 : concurrency
}
export function getUploadChunkTimeout(): number {
return 300_000 // 5 minutes
}
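Restated outside the diff, the revised hostname check reduces to the following (a sketch mirroring the code above; the sample URLs come from the accompanying tests):

    function isGhesLike(serverURL: string): boolean {
      const hostname = new URL(serverURL).hostname.trimEnd().toUpperCase()
      const isGitHubHost = hostname === 'GITHUB.COM'
      const isGheHost = hostname.endsWith('.GHE.COM')
      const isLocalHost = hostname.endsWith('.LOCALHOST')
      return !isGitHubHost && !isGheHost && !isLocalHost
    }

    isGhesLike('https://github.com')               // false
    isGhesLike('https://github.localhost')         // false (local development)
    isGhesLike('https://foo.ghe.com')              // false (*.ghe.com host)
    isGhesLike('https://my-enterprise.github.com') // true (treated as GHES)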


@@ -1,7 +1,11 @@
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/core-http'
import {ZipUploadStream} from './zip'
import {getUploadChunkSize, getConcurrency} from '../shared/config'
import {
getUploadChunkSize,
getConcurrency,
getUploadChunkTimeout
} from '../shared/config'
import * as core from '@actions/core'
import * as crypto from 'crypto'
import * as stream from 'stream'
@@ -25,29 +29,26 @@ export async function uploadZipToBlobStorage(
): Promise<BlobUploadResponse> {
let uploadByteCount = 0
let lastProgressTime = Date.now()
let timeoutId: NodeJS.Timeout | undefined
const abortController = new AbortController()
const chunkTimer = (timeout: number): NodeJS.Timeout => {
// clear the previous timeout
if (timeoutId) {
clearTimeout(timeoutId)
}
const chunkTimer = async (interval: number): Promise<void> =>
new Promise((resolve, reject) => {
const timer = setInterval(() => {
if (Date.now() - lastProgressTime > interval) {
reject(new Error('Upload progress stalled.'))
}
}, interval)
abortController.signal.addEventListener('abort', () => {
clearInterval(timer)
resolve()
})
})
timeoutId = setTimeout(() => {
const now = Date.now()
// if there's been more than 30 seconds since the
// last progress event, then we'll consider the upload stalled
if (now - lastProgressTime > timeout) {
throw new Error('Upload progress stalled.')
}
}, timeout)
return timeoutId
}
const maxConcurrency = getConcurrency()
const bufferSize = getUploadChunkSize()
const blobClient = new BlobClient(authenticatedUploadURL)
const blockBlobClient = blobClient.getBlockBlobClient()
const timeoutDuration = 300000 // 30 seconds
core.debug(
`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
@@ -56,13 +57,13 @@ export async function uploadZipToBlobStorage(
const uploadCallback = (progress: TransferProgressEvent): void => {
core.info(`Uploaded bytes ${progress.loadedBytes}`)
uploadByteCount = progress.loadedBytes
chunkTimer(timeoutDuration)
lastProgressTime = Date.now()
}
const options: BlockBlobUploadStreamOptions = {
blobHTTPHeaders: {blobContentType: 'zip'},
onProgress: uploadCallback
onProgress: uploadCallback,
abortSignal: abortController.signal
}
let sha256Hash: string | undefined = undefined
@@ -75,24 +76,22 @@ export async function uploadZipToBlobStorage(
core.info('Beginning upload of artifact content to blob storage')
try {
// Start the chunk timer
timeoutId = chunkTimer(timeoutDuration)
await blockBlobClient.uploadStream(
uploadStream,
bufferSize,
maxConcurrency,
options
)
await Promise.race([
blockBlobClient.uploadStream(
uploadStream,
bufferSize,
maxConcurrency,
options
),
chunkTimer(getUploadChunkTimeout())
])
} catch (error) {
if (NetworkError.isNetworkErrorCode(error?.code)) {
throw new NetworkError(error?.code)
}
throw error
} finally {
// clear the timeout whether or not the upload completes
if (timeoutId) {
clearTimeout(timeoutId)
}
abortController.abort()
}
core.info('Finished uploading artifact content to blob storage!')
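The new control flow above generalizes to a small watchdog pattern: race the long-running work against a timer that rejects once no progress has been reported for a full interval, and stop the timer via an AbortSignal when the work settles. A self-contained sketch (withStallTimeout is an illustrative name, not part of the library):

    async function withStallTimeout<T>(
      work: (signal: AbortSignal, touch: () => void) => Promise<T>,
      interval: number
    ): Promise<T> {
      let lastProgress = Date.now()
      const controller = new AbortController()
      const watchdog = new Promise<never>((_, reject) => {
        const timer = setInterval(() => {
          if (Date.now() - lastProgress > interval) {
            reject(new Error('Upload progress stalled.'))
          }
        }, interval)
        // Aborting the signal stops the timer, whoever wins the race
        controller.signal.addEventListener('abort', () => clearInterval(timer))
      })
      try {
        // The work reports liveness by calling touch() on each progress event
        return await Promise.race([
          work(controller.signal, () => {
            lastProgress = Date.now()
          }),
          watchdog
        ])
      } finally {
        controller.abort() // runs whether the work finished or stalled
      }
    }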


@@ -1,7 +1,6 @@
import * as stream from 'stream'
import * as archiver from 'archiver'
import * as core from '@actions/core'
import {createReadStream} from 'fs'
import {UploadZipSpecification} from './upload-zip-specification'
import {getUploadChunkSize} from '../shared/config'
@@ -44,7 +43,7 @@ export async function createZipUploadStream(
for (const file of uploadSpecification) {
if (file.sourcePath !== null) {
// Add a normal file to the zip
zip.append(createReadStream(file.sourcePath), {
zip.file(file.sourcePath, {
name: file.destinationPath
})
} else {
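The switch from zip.append(createReadStream(...)) to zip.file(...) is what the "lazy stream" release note refers to: archiver opens each source file only when the archive reaches that entry, rather than opening a read stream per file up front, which could exhaust the process's open-file limit on large uploads. In isolation (a sketch with placeholder paths):

    import * as archiver from 'archiver'

    const zip = archiver.create('zip')

    // Eager (old): one open file descriptor per entry, immediately
    //   zip.append(createReadStream('src/a.txt'), {name: 'a.txt'})

    // Lazy (new): the descriptor is opened when the entry is processed
    zip.file('src/a.txt', {name: 'a.txt'})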


@@ -63,6 +63,8 @@ export type AttestOptions = {
// Sigstore instance to use for signing. Must be one of "public-good" or
// "github".
sigstore?: 'public-good' | 'github'
// HTTP headers to include in request to attestations API.
headers?: {[header: string]: string | number | undefined}
// Whether to skip writing the attestation to the GH attestations API.
skipWrite?: boolean
}
@@ -113,6 +115,8 @@ export type AttestProvenanceOptions = {
// Sigstore instance to use for signing. Must be one of "public-good" or
// "github".
sigstore?: 'public-good' | 'github'
// HTTP headers to include in request to attestations API.
headers?: {[header: string]: string | number | undefined}
// Whether to skip writing the attestation to the GH attestations API.
skipWrite?: boolean
// Issuer URL responsible for minting the OIDC token from which the
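A sketch of the new headers option in use (the subject, digest, and header values are hypothetical; the option is forwarded to the POST /repos/{owner}/{repo}/attestations request):

    import {attestProvenance} from '@actions/attest'

    async function run(): Promise<void> {
      const attestation = await attestProvenance({
        subjectName: 'my-org/my-image',
        subjectDigest: {sha256: '36f0a9...'},  // hypothetical digest
        token: process.env.GITHUB_TOKEN ?? '',
        headers: {'X-GitHub-Foo': 'true'}      // extra API request headers
      })
      // ID assigned by the attestations API (when not skipping the write)
      console.log(attestation.attestationID)
    }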


@@ -1,19 +1,42 @@
# @actions/attest Releases
### 1.4.2
- Fix bug in `buildSLSAProvenancePredicate`/`attestProvenance` when generating provenance statement for enterprise account using customized OIDC issuer value [#1823](https://github.com/actions/toolkit/pull/1823)
### 1.4.1
- Bump @actions/http-client from 2.2.1 to 2.2.3 [#1805](https://github.com/actions/toolkit/pull/1805)
### 1.4.0
- Add new `headers` parameter to the `attest` and `attestProvenance` functions [#1790](https://github.com/actions/toolkit/pull/1790)
- Update `buildSLSAProvenancePredicate`/`attestProvenance` to automatically derive default OIDC issuer URL from current execution context [#1796](https://github.com/actions/toolkit/pull/1796)
### 1.3.1
- Fix bug with proxy support when retrieving JWKS for OIDC issuer [#1776](https://github.com/actions/toolkit/pull/1776)
### 1.3.0
- Dynamic construction of Sigstore API URLs [#1735](https://github.com/actions/toolkit/pull/1735)
- Switch to new GH provenance build type [#1745](https://github.com/actions/toolkit/pull/1745)
- Fetch existing Rekor entry on 409 conflict error [#1759](https://github.com/actions/toolkit/pull/1759)
- Bump @sigstore/bundle from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
- Bump @sigstore/sign from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
### 1.2.1
- Retry request on attestation persistence failure
- Retry request on attestation persistence failure [#1725](https://github.com/actions/toolkit/pull/1725)
### 1.2.0
- Generate attestations using the v0.3 Sigstore bundle format.
- Bump @sigstore/bundle from 2.2.0 to 2.3.0.
- Bump @sigstore/sign from 2.2.3 to 2.3.0.
- Remove dependency on make-fetch-happen
- Generate attestations using the v0.3 Sigstore bundle format [#1701](https://github.com/actions/toolkit/pull/1701)
- Bump @sigstore/bundle from 2.2.0 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
- Bump @sigstore/sign from 2.2.3 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
- Remove dependency on make-fetch-happen [#1714](https://github.com/actions/toolkit/pull/1714)
### 1.1.0
- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement.
- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement [#1693](https://github.com/actions/toolkit/pull/1693)
### 1.0.0


@@ -4,12 +4,12 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
{
"params": {
"buildDefinition": {
"buildType": "https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1",
"buildType": "https://actions.github.io/buildtypes/workflow/v1",
"externalParameters": {
"workflow": {
"path": ".github/workflows/main.yml",
"ref": "main",
"repository": "https://github.com/owner/repo",
"repository": "https://foo.ghe.com/owner/repo",
},
},
"internalParameters": {
@@ -17,6 +17,7 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
"event_name": "push",
"repository_id": "repo-id",
"repository_owner_id": "owner-id",
"runner_environment": "github-hosted",
},
},
"resolvedDependencies": [
@@ -24,16 +25,16 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
"digest": {
"gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
},
"uri": "git+https://github.com/owner/repo@refs/heads/main",
"uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
},
],
},
"runDetails": {
"builder": {
"id": "https://github.com/actions/runner/github-hosted",
"id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
},
"metadata": {
"invocationId": "https://github.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
"invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
},
},
},


@@ -45,7 +45,8 @@ describe('getIDTokenClaims', () => {
sha: 'sha',
repository: 'repo',
event_name: 'push',
workflow_ref: 'main',
job_workflow_ref: 'job_workflow_ref',
workflow_ref: 'workflow',
repository_id: '1',
repository_owner_id: '1',
runner_environment: 'github-hosted',
@@ -67,6 +68,55 @@ describe('getIDTokenClaims', () => {
})
})
describe('when ID token is valid (w/ enterprise slug)', () => {
const claims = {
iss: `${issuer}/foo-bar`,
aud: audience,
ref: 'ref',
sha: 'sha',
repository: 'repo',
event_name: 'push',
job_workflow_ref: 'job_workflow_ref',
workflow_ref: 'workflow',
repository_id: '1',
repository_owner_id: '1',
runner_environment: 'github-hosted',
run_id: '1',
run_attempt: '1'
}
beforeEach(async () => {
const jwt = await new jose.SignJWT(claims)
.setProtectedHeader({alg: 'PS256'})
.sign(key.privateKey)
nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
})
it('returns the ID token claims', async () => {
const result = await getIDTokenClaims(issuer)
expect(result).toEqual(claims)
})
})
describe('when ID token is missing the "iss" claim', () => {
const claims = {
aud: audience
}
beforeEach(async () => {
const jwt = await new jose.SignJWT(claims)
.setProtectedHeader({alg: 'PS256'})
.sign(key.privateKey)
nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
})
it('throws an error', async () => {
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing "iss"/i)
})
})
describe('when ID token is missing required claims', () => {
const claims = {
iss: issuer,
@@ -98,7 +148,9 @@ describe('getIDTokenClaims', () => {
})
it('throws an error', async () => {
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/issuer invalid/)
await expect(getIDTokenClaims(issuer)).rejects.toThrow(
/unexpected "iss"/i
)
})
})
@@ -114,7 +166,7 @@ describe('getIDTokenClaims', () => {
})
it('throw an error', async () => {
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/audience invalid/)
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/unexpected "aud"/)
})
})


@@ -8,7 +8,7 @@ import {attestProvenance, buildSLSAProvenancePredicate} from '../src/provenance'
describe('provenance functions', () => {
const originalEnv = process.env
const issuer = 'https://example.com'
const issuer = 'https://token.actions.foo.ghe.com'
const audience = 'nobody'
const jwksPath = '/.well-known/jwks.json'
const tokenPath = '/token'
@@ -23,6 +23,7 @@ describe('provenance functions', () => {
repository: 'owner/repo',
ref: 'refs/heads/main',
sha: 'babca52ab0c93ae16539e5923cb0d7403b9a093b',
job_workflow_ref: 'owner/workflows/.github/workflows/publish.yml@main',
workflow_ref: 'owner/repo/.github/workflows/main.yml@main',
event_name: 'push',
repository_id: 'repo-id',
@@ -37,7 +38,7 @@ describe('provenance functions', () => {
...originalEnv,
ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
GITHUB_SERVER_URL: 'https://github.com',
GITHUB_SERVER_URL: 'https://foo.ghe.com',
GITHUB_REPOSITORY: claims.repository
}
@@ -67,7 +68,7 @@ describe('provenance functions', () => {
describe('buildSLSAProvenancePredicate', () => {
it('returns a provenance hydrated from an OIDC token', async () => {
const predicate = await buildSLSAProvenancePredicate(issuer)
const predicate = await buildSLSAProvenancePredicate()
expect(predicate).toMatchSnapshot()
})
})
@@ -95,9 +96,9 @@ describe('provenance functions', () => {
})
describe('when using the github Sigstore instance', () => {
const {fulcioURL, tsaServerURL} = signingEndpoints('github')
beforeEach(async () => {
const {fulcioURL, tsaServerURL} = signingEndpoints('github')
// Mock Sigstore
await mockFulcio({baseURL: fulcioURL, strict: false})
await mockTSA({baseURL: tsaServerURL})
@@ -117,8 +118,7 @@ describe('provenance functions', () => {
subjectName,
subjectDigest,
token: 'token',
sigstore: 'github',
issuer
sigstore: 'github'
})
expect(attestation).toBeDefined()
@@ -145,8 +145,7 @@ describe('provenance functions', () => {
const attestation = await attestProvenance({
subjectName,
subjectDigest,
token: 'token',
issuer
token: 'token'
})
expect(attestation).toBeDefined()
@@ -182,8 +181,7 @@ describe('provenance functions', () => {
subjectName,
subjectDigest,
token: 'token',
sigstore: 'public-good',
issuer
sigstore: 'public-good'
})
expect(attestation).toBeDefined()
@@ -210,8 +208,7 @@ describe('provenance functions', () => {
const attestation = await attestProvenance({
subjectName,
subjectDigest,
token: 'token',
issuer
token: 'token'
})
expect(attestation).toBeDefined()
@@ -237,8 +234,7 @@ describe('provenance functions', () => {
subjectDigest,
token: 'token',
sigstore: 'public-good',
skipWrite: true,
issuer
skipWrite: true
})
expect(attestation).toBeDefined()


@@ -5,6 +5,7 @@ describe('writeAttestation', () => {
const originalEnv = process.env
const attestation = {foo: 'bar '}
const token = 'token'
const headers = {'X-GitHub-Foo': 'true'}
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
@@ -27,14 +28,16 @@ describe('writeAttestation', () => {
.intercept({
path: '/repos/foo/bar/attestations',
method: 'POST',
headers: {authorization: `token ${token}`},
headers: {authorization: `token ${token}`, ...headers},
body: JSON.stringify({bundle: attestation})
})
.reply(201, {id: '123'})
})
it('persists the attestation', async () => {
await expect(writeAttestation(attestation, token)).resolves.toEqual('123')
await expect(
writeAttestation(attestation, token, {headers})
).resolves.toEqual('123')
})
})

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "@actions/attest",
"version": "1.2.1",
"version": "1.4.2",
"description": "Actions attestation lib",
"keywords": [
"github",
@@ -35,22 +35,20 @@
"url": "https://github.com/actions/toolkit/issues"
},
"devDependencies": {
"@sigstore/mock": "^0.6.5",
"@sigstore/mock": "^0.7.4",
"@sigstore/rekor-types": "^2.0.0",
"@types/jsonwebtoken": "^9.0.6",
"jose": "^5.2.3",
"nock": "^13.5.1",
"undici": "^5.28.4"
},
"dependencies": {
"@actions/core": "^1.10.1",
"@actions/github": "^6.0.0",
"@actions/http-client": "^2.2.1",
"@actions/http-client": "^2.2.3",
"@octokit/plugin-retry": "^6.0.1",
"@sigstore/bundle": "^2.3.0",
"@sigstore/sign": "^2.3.0",
"jsonwebtoken": "^9.0.2",
"jwks-rsa": "^3.1.0"
"@sigstore/bundle": "^2.3.2",
"@sigstore/sign": "^2.3.2",
"jose": "^5.2.3"
},
"overrides": {
"@octokit/plugin-retry": {


@@ -28,6 +28,8 @@ export type AttestOptions = {
// Sigstore instance to use for signing. Must be one of "public-good" or
// "github".
sigstore?: SigstoreInstance
// HTTP headers to include in request to attestations API.
headers?: {[header: string]: string | number | undefined}
// Whether to skip writing the attestation to the GH attestations API.
skipWrite?: boolean
}
@@ -61,7 +63,11 @@ export async function attest(options: AttestOptions): Promise<Attestation> {
// Store the attestation
let attestationID: string | undefined
if (options.skipWrite !== true) {
attestationID = await writeAttestation(bundleToJSON(bundle), options.token)
attestationID = await writeAttestation(
bundleToJSON(bundle),
options.token,
{headers: options.headers}
)
}
return toAttestation(bundle, attestationID)


@@ -1,16 +1,21 @@
import {getIDToken} from '@actions/core'
import {HttpClient} from '@actions/http-client'
import * as jwt from 'jsonwebtoken'
import jwks from 'jwks-rsa'
import * as jose from 'jose'
const OIDC_AUDIENCE = 'nobody'
const VALID_SERVER_URLS = [
'https://github.com',
new RegExp('^https://[a-z0-9-]+\\.ghe\\.com$')
] as const
const REQUIRED_CLAIMS = [
'iss',
'ref',
'sha',
'repository',
'event_name',
'job_workflow_ref',
'workflow_ref',
'repository_id',
'repository_owner_id',
@@ -25,7 +30,8 @@ type OIDCConfig = {
jwks_uri: string
}
export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
export const getIDTokenClaims = async (issuer?: string): Promise<ClaimSet> => {
issuer = issuer || getIssuer()
try {
const token = await getIDToken(OIDC_AUDIENCE)
const claims = await decodeOIDCToken(token, issuer)
@@ -39,55 +45,46 @@ export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
const decodeOIDCToken = async (
token: string,
issuer: string
): Promise<jwt.JwtPayload> => {
): Promise<jose.JWTPayload> => {
// Verify and decode token
return new Promise((resolve, reject) => {
jwt.verify(
token,
getPublicKey(issuer),
{audience: OIDC_AUDIENCE, issuer},
(err, decoded) => {
if (err) {
reject(err)
} else if (!decoded || typeof decoded === 'string') {
reject(new Error('No decoded token'))
} else {
resolve(decoded)
}
}
)
const jwks = jose.createLocalJWKSet(await getJWKS(issuer))
const {payload} = await jose.jwtVerify(token, jwks, {
audience: OIDC_AUDIENCE
})
}
// Returns a callback to locate the public key for the given JWT header. This
// involves two calls:
// 1. Fetch the OpenID configuration to get the JWKS URI.
// 2. Fetch the public key from the JWKS URI.
const getPublicKey =
(issuer: string): jwt.GetPublicKeyOrSecret =>
(header: jwt.JwtHeader, callback: jwt.SigningKeyCallback) => {
// Look up the JWKS URI from the issuer's OpenID configuration
new HttpClient('actions/attest')
.getJson<OIDCConfig>(`${issuer}/.well-known/openid-configuration`)
.then(data => {
if (!data.result) {
callback(new Error('No OpenID configuration found'))
} else {
// Fetch the public key from the JWKS URI
jwks({jwksUri: data.result.jwks_uri}).getSigningKey(
header.kid,
(err, key) => {
callback(err, key?.getPublicKey())
}
)
}
})
.catch(err => {
callback(err)
})
if (!payload.iss) {
throw new Error('Missing "iss" claim')
}
function assertClaimSet(claims: jwt.JwtPayload): asserts claims is ClaimSet {
// Check that the issuer STARTS WITH the expected issuer URL to account for
// the fact that the value may include an enterprise-specific slug
if (!payload.iss.startsWith(issuer)) {
throw new Error(`Unexpected "iss" claim: ${payload.iss}`)
}
return payload
}
const getJWKS = async (issuer: string): Promise<jose.JSONWebKeySet> => {
const client = new HttpClient('@actions/attest')
const config = await client.getJson<OIDCConfig>(
`${issuer}/.well-known/openid-configuration`
)
if (!config.result) {
throw new Error('No OpenID configuration found')
}
const jwks = await client.getJson<jose.JSONWebKeySet>(config.result.jwks_uri)
if (!jwks.result) {
throw new Error('No JWKS found for issuer')
}
return jwks.result
}
function assertClaimSet(claims: jose.JWTPayload): asserts claims is ClaimSet {
const missingClaims: string[] = []
for (const claim of REQUIRED_CLAIMS) {
@@ -100,3 +97,21 @@ function assertClaimSet(claims: jwt.JwtPayload): asserts claims is ClaimSet {
throw new Error(`Missing claims: ${missingClaims.join(', ')}`)
}
}
// Derive the current OIDC issuer based on the server URL
function getIssuer(): string {
const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
// Ensure the server URL is a valid GitHub server URL
if (!VALID_SERVER_URLS.some(valid_url => serverURL.match(valid_url))) {
throw new Error(`Invalid server URL: ${serverURL}`)
}
let host = new URL(serverURL).hostname
if (host === 'github.com') {
host = 'githubusercontent.com'
}
return `https://token.actions.${host}`
}
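Derived issuer values implied by the function above (a sketch, not exhaustive):

    // GITHUB_SERVER_URL unset or https://github.com
    //   -> https://token.actions.githubusercontent.com
    // https://foo.ghe.com
    //   -> https://token.actions.foo.ghe.com
    // Anything else (e.g. https://example.com) fails the
    // VALID_SERVER_URLS check and throws 'Invalid server URL'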


@@ -3,12 +3,7 @@ import {getIDTokenClaims} from './oidc'
import type {Attestation, Predicate} from './shared.types'
const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1'
const GITHUB_BUILDER_ID_PREFIX = 'https://github.com/actions/runner'
const GITHUB_BUILD_TYPE =
'https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1'
const DEFAULT_ISSUER = 'https://token.actions.githubusercontent.com'
const GITHUB_BUILD_TYPE = 'https://actions.github.io/buildtypes/workflow/v1'
export type AttestProvenanceOptions = Omit<
AttestOptions,
@@ -27,7 +22,7 @@ export type AttestProvenanceOptions = Omit<
* @returns The SLSA provenance predicate.
*/
export const buildSLSAProvenancePredicate = async (
issuer: string = DEFAULT_ISSUER
issuer?: string
): Promise<Predicate> => {
const serverURL = process.env.GITHUB_SERVER_URL
const claims = await getIDTokenClaims(issuer)
@@ -55,7 +50,8 @@
github: {
event_name: claims.event_name,
repository_id: claims.repository_id,
repository_owner_id: claims.repository_owner_id
repository_owner_id: claims.repository_owner_id,
runner_environment: claims.runner_environment
}
},
resolvedDependencies: [
@@ -69,7 +65,7 @@
},
runDetails: {
builder: {
id: `${GITHUB_BUILDER_ID_PREFIX}/${claims.runner_environment}`
id: `${serverURL}/${claims.job_workflow_ref}`
},
metadata: {
invocationId: `${serverURL}/${claims.repository}/actions/runs/${claims.run_id}/attempts/${claims.run_attempt}`


@@ -87,6 +87,7 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
new RekorWitness({
rekorBaseURL: opts.rekorURL,
entryType: 'dsse',
fetchOnConflict: true,
timeout,
retry
})


@@ -1,11 +1,13 @@
import * as github from '@actions/github'
import {retry} from '@octokit/plugin-retry'
import {RequestHeaders} from '@octokit/types'
const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
const DEFAULT_RETRY_COUNT = 5
export type WriteOptions = {
retry?: number
headers?: RequestHeaders
}
/**
* Writes an attestation to the repository's attestations endpoint.
@@ -26,6 +28,7 @@ export const writeAttestation = async (
const response = await octokit.request(CREATE_ATTESTATION_REQUEST, {
owner: github.context.repo.owner,
repo: github.context.repo.repo,
headers: options.headers,
data: {bundle: attestation}
})


@@ -1,5 +1,8 @@
# @actions/glob Releases
### 0.5.0
- Added `excludeHiddenFiles` option, which is disabled by default to preserve existing behavior [#1791: Add glob option to ignore hidden files](https://github.com/actions/toolkit/pull/1791)
### 0.4.0
- Pass in the current workspace as a parameter to HashFiles [#1318](https://github.com/actions/toolkit/pull/1318)


@@ -708,7 +708,7 @@ describe('globber', () => {
expect(itemPaths).toEqual([])
})
it('returns hidden files', async () => {
it('returns hidden files by default', async () => {
// Create the following layout:
// <root>
// <root>/.emptyFolder
@@ -734,6 +734,26 @@ describe('globber', () => {
])
})
it('ignores hidden files when excludeHiddenFiles is set', async () => {
// Create the following layout:
// <root>
// <root>/.emptyFolder
// <root>/.file
// <root>/.folder
// <root>/.folder/file
const root = path.join(getTestTemp(), 'ignores-hidden-files')
await createHiddenDirectory(path.join(root, '.emptyFolder'))
await createHiddenDirectory(path.join(root, '.folder'))
await createHiddenFile(path.join(root, '.file'), 'test .file content')
await fs.writeFile(
path.join(root, '.folder', 'file'),
'test .folder/file content'
)
const itemPaths = await glob(root, {excludeHiddenFiles: true})
expect(itemPaths).toEqual([root])
})
it('returns normalized paths', async () => {
// Create the following layout:
// <root>/hello/world.txt


@@ -1,6 +1,6 @@
{
"name": "@actions/glob",
"version": "0.4.0",
"version": "0.5.0",
"lockfileVersion": 3,
"requires": true,
"description": "Actions glob lib",


@@ -1,6 +1,6 @@
{
"name": "@actions/glob",
"version": "0.4.0",
"version": "0.5.0",
"preview": true,
"description": "Actions glob lib",
"keywords": [


@@ -9,7 +9,8 @@ export function getOptions(copy?: GlobOptions): GlobOptions {
followSymbolicLinks: true,
implicitDescendants: true,
matchDirectories: true,
omitBrokenSymbolicLinks: true
omitBrokenSymbolicLinks: true,
excludeHiddenFiles: false
}
if (copy) {
@@ -32,6 +33,11 @@ export function getOptions(copy?: GlobOptions): GlobOptions {
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks
core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`)
}
if (typeof copy.excludeHiddenFiles === 'boolean') {
result.excludeHiddenFiles = copy.excludeHiddenFiles
core.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`)
}
}
return result


@@ -36,4 +36,13 @@ export interface GlobOptions {
* @default true
*/
omitBrokenSymbolicLinks?: boolean
/**
* Indicates whether to exclude hidden files (files and directories starting with a `.`).
* This does not apply to Windows files and directories with the hidden attribute unless
* they are also prefixed with a `.`.
*
* @default false
*/
excludeHiddenFiles?: boolean
}
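A usage sketch for the new option (the pattern and paths are placeholders; create and globGenerator are the package's public API):

    import * as glob from '@actions/glob'

    async function run(): Promise<void> {
      // Matches files under src/ but skips dotfiles such as src/.eslintrc
      const globber = await glob.create('src/**', {excludeHiddenFiles: true})
      for await (const file of globber.globGenerator()) {
        console.log(file)
      }
    }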


@@ -128,6 +128,11 @@ export class DefaultGlobber implements Globber {
continue
}
// Hidden file or directory?
if (options.excludeHiddenFiles && path.basename(item.path).match(/^\./)) {
continue
}
// Directory
if (stats.isDirectory()) {
// Matched


@@ -1,5 +1,14 @@
## Releases
## 2.2.3
- Fixed an issue where proxy username and password were not handled correctly [#1799](https://github.com/actions/toolkit/pull/1799)
## 2.2.2
- Better handling of url encoded usernames and passwords in proxy config [#1782](https://github.com/actions/toolkit/pull/1782)
## 2.2.1
- Make sure RequestOptions.keepAlive is applied properly on node20 runtime [#1572](https://github.com/actions/toolkit/pull/1572)
## 2.2.0
- Add function to return proxy agent dispatcher for compatibility with latest octokit packages [#1547](https://github.com/actions/toolkit/pull/1547)


@@ -37,7 +37,7 @@ describe('basics', () => {
// "user-agent": "typed-test-client-tests"
// },
// "origin": "173.95.152.44",
// "url": "https://postman-echo.com/get"
// "url": "http://postman-echo.com/get"
// }
it('does basic http get request', async () => {
@@ -63,16 +63,17 @@ describe('basics', () => {
expect(obj.headers['user-agent']).toBeFalsy()
})
/* TODO write a mock rather then relying on a third party
it('does basic https get request', async () => {
const res: httpm.HttpClientResponse = await _http.get(
'https://postman-echo.com/get'
'http://postman-echo.com/get'
)
expect(res.message.statusCode).toBe(200)
const body: string = await res.readBody()
const obj = JSON.parse(body)
expect(obj.url).toBe('https://postman-echo.com/get')
expect(obj.url).toBe('http://postman-echo.com/get')
})
*/
it('does basic http get request with default headers', async () => {
const http: httpm.HttpClient = new httpm.HttpClient(
'http-client-tests',
@@ -125,12 +126,12 @@ describe('basics', () => {
it('pipes a get request', async () => {
return new Promise<void>(async resolve => {
const file = fs.createWriteStream(sampleFilePath)
;(await _http.get('https://postman-echo.com/get')).message
;(await _http.get('http://postman-echo.com/get')).message
.pipe(file)
.on('close', () => {
const body: string = fs.readFileSync(sampleFilePath).toString()
const obj = JSON.parse(body)
expect(obj.url).toBe('https://postman-echo.com/get')
expect(obj.url).toBe('http://postman-echo.com/get')
resolve()
})
})
@@ -138,32 +139,32 @@ describe('basics', () => {
it('does basic get request with redirects', async () => {
const res: httpm.HttpClientResponse = await _http.get(
`https://postman-echo.com/redirect-to?url=${encodeURIComponent(
'https://postman-echo.com/get'
`http://postman-echo.com/redirect-to?url=${encodeURIComponent(
'http://postman-echo.com/get'
)}`
)
expect(res.message.statusCode).toBe(200)
const body: string = await res.readBody()
const obj = JSON.parse(body)
expect(obj.url).toBe('https://postman-echo.com/get')
expect(obj.url).toBe('http://postman-echo.com/get')
})
it('does basic get request with redirects (303)', async () => {
const res: httpm.HttpClientResponse = await _http.get(
`https://postman-echo.com/redirect-to?url=${encodeURIComponent(
'https://postman-echo.com/get'
`http://postman-echo.com/redirect-to?url=${encodeURIComponent(
'http://postman-echo.com/get'
)}&status_code=303`
)
expect(res.message.statusCode).toBe(200)
const body: string = await res.readBody()
const obj = JSON.parse(body)
expect(obj.url).toBe('https://postman-echo.com/get')
expect(obj.url).toBe('http://postman-echo.com/get')
})
it('returns 404 for not found get request on redirect', async () => {
const res: httpm.HttpClientResponse = await _http.get(
`https://postman-echo.com/redirect-to?url=${encodeURIComponent(
'https://postman-echo.com/status/404'
`http://postman-echo.com/redirect-to?url=${encodeURIComponent(
'http://postman-echo.com/status/404'
)}&status_code=303`
)
expect(res.message.statusCode).toBe(404)
@@ -177,8 +178,8 @@ describe('basics', () => {
{allowRedirects: false}
)
const res: httpm.HttpClientResponse = await http.get(
`https://postman-echo.com/redirect-to?url=${encodeURIComponent(
'https://postman-echo.com/get'
`http://postman-echo.com/redirect-to?url=${encodeURIComponent(
'http://postman-echo.com/get'
)}`
)
expect(res.message.statusCode).toBe(302)
@@ -191,8 +192,8 @@ describe('basics', () => {
authorization: 'shhh'
}
const res: httpm.HttpClientResponse = await _http.get(
`https://postman-echo.com/redirect-to?url=${encodeURIComponent(
'https://www.postman-echo.com/get'
`http://postman-echo.com/redirect-to?url=${encodeURIComponent(
'http://www.postman-echo.com/get'
)}`,
headers
)
@@ -204,7 +205,7 @@ describe('basics', () => {
expect(obj.headers[httpm.Headers.Accept]).toBe('application/json')
expect(obj.headers['Authorization']).toBeUndefined()
expect(obj.headers['authorization']).toBeUndefined()
expect(obj.url).toBe('https://www.postman-echo.com/get')
expect(obj.url).toBe('http://www.postman-echo.com/get')
})
it('does not pass Auth with diff hostname redirects', async () => {
@@ -213,8 +214,8 @@ describe('basics', () => {
Authorization: 'shhh'
}
const res: httpm.HttpClientResponse = await _http.get(
`https://postman-echo.com/redirect-to?url=${encodeURIComponent(
'https://www.postman-echo.com/get'
`http://postman-echo.com/redirect-to?url=${encodeURIComponent(
'http://www.postman-echo.com/get'
)}`,
headers
)
@@ -226,7 +227,7 @@ describe('basics', () => {
expect(obj.headers[httpm.Headers.Accept]).toBe('application/json')
expect(obj.headers['Authorization']).toBeUndefined()
expect(obj.headers['authorization']).toBeUndefined()
expect(obj.url).toBe('https://www.postman-echo.com/get')
expect(obj.url).toBe('http://www.postman-echo.com/get')
})
it('does basic head request', async () => {
@@ -289,11 +290,11 @@ describe('basics', () => {
it('gets a json object', async () => {
const jsonObj = await _http.getJson<HttpBinData>(
'https://postman-echo.com/get'
'http://postman-echo.com/get'
)
expect(jsonObj.statusCode).toBe(200)
expect(jsonObj.result).toBeDefined()
expect(jsonObj.result?.url).toBe('https://postman-echo.com/get')
expect(jsonObj.result?.url).toBe('http://postman-echo.com/get')
expect(jsonObj.result?.headers[httpm.Headers.Accept]).toBe(
httpm.MediaTypes.ApplicationJson
)
@@ -304,7 +305,7 @@ describe('basics', () => {
it('getting a non existent json object returns null', async () => {
const jsonObj = await _http.getJson<HttpBinData>(
'https://postman-echo.com/status/404'
'http://postman-echo.com/status/404'
)
expect(jsonObj.statusCode).toBe(404)
expect(jsonObj.result).toBeNull()
@@ -313,12 +314,12 @@ describe('basics', () => {
it('posts a json object', async () => {
const res = {name: 'foo'}
const restRes = await _http.postJson<HttpBinData>(
'https://postman-echo.com/post',
'http://postman-echo.com/post',
res
)
expect(restRes.statusCode).toBe(200)
expect(restRes.result).toBeDefined()
expect(restRes.result?.url).toBe('https://postman-echo.com/post')
expect(restRes.result?.url).toBe('http://postman-echo.com/post')
expect(restRes.result?.json.name).toBe('foo')
expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
httpm.MediaTypes.ApplicationJson
@@ -334,12 +335,12 @@ describe('basics', () => {
it('puts a json object', async () => {
const res = {name: 'foo'}
const restRes = await _http.putJson<HttpBinData>(
'https://postman-echo.com/put',
'http://postman-echo.com/put',
res
)
expect(restRes.statusCode).toBe(200)
expect(restRes.result).toBeDefined()
expect(restRes.result?.url).toBe('https://postman-echo.com/put')
expect(restRes.result?.url).toBe('http://postman-echo.com/put')
expect(restRes.result?.json.name).toBe('foo')
expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
@@ -356,12 +357,12 @@ describe('basics', () => {
it('patch a json object', async () => {
const res = {name: 'foo'}
const restRes = await _http.patchJson<HttpBinData>(
'https://postman-echo.com/patch',
'http://postman-echo.com/patch',
res
)
expect(restRes.statusCode).toBe(200)
expect(restRes.result).toBeDefined()
expect(restRes.result?.url).toBe('https://postman-echo.com/patch')
expect(restRes.result?.url).toBe('http://postman-echo.com/patch')
expect(restRes.result?.json.name).toBe('foo')
expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
httpm.MediaTypes.ApplicationJson

View File

@@ -12,7 +12,7 @@ describe('headers', () => {
it('preserves existing headers on getJson', async () => {
const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
let jsonObj = await _http.getJson<any>(
'https://postman-echo.com/get',
'http://postman-echo.com/get',
additionalHeaders
)
expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('foo')
@@ -26,7 +26,7 @@ describe('headers', () => {
[httpm.Headers.Accept]: 'baz'
}
}
jsonObj = await httpWithHeaders.getJson<any>('https://postman-echo.com/get')
jsonObj = await httpWithHeaders.getJson<any>('http://postman-echo.com/get')
expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
expect(jsonObj.headers[httpm.Headers.ContentType]).toContain(
httpm.MediaTypes.ApplicationJson
@@ -36,7 +36,7 @@ describe('headers', () => {
it('preserves existing headers on postJson', async () => {
const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
let jsonObj = await _http.postJson<any>(
'https://postman-echo.com/post',
'http://postman-echo.com/post',
{},
additionalHeaders
)
@@ -52,7 +52,7 @@ describe('headers', () => {
}
}
jsonObj = await httpWithHeaders.postJson<any>(
'https://postman-echo.com/post',
'http://postman-echo.com/post',
{}
)
expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
@@ -64,7 +64,7 @@ describe('headers', () => {
it('preserves existing headers on putJson', async () => {
const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
let jsonObj = await _http.putJson<any>(
'https://postman-echo.com/put',
'http://postman-echo.com/put',
{},
additionalHeaders
)
@@ -80,7 +80,7 @@ describe('headers', () => {
}
}
jsonObj = await httpWithHeaders.putJson<any>(
'https://postman-echo.com/put',
'http://postman-echo.com/put',
{}
)
expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
@@ -92,7 +92,7 @@ describe('headers', () => {
it('preserves existing headers on patchJson', async () => {
const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
let jsonObj = await _http.patchJson<any>(
'https://postman-echo.com/patch',
'http://postman-echo.com/patch',
{},
additionalHeaders
)
@@ -108,7 +108,7 @@ describe('headers', () => {
}
}
jsonObj = await httpWithHeaders.patchJson<any>(
'https://postman-echo.com/patch',
'http://postman-echo.com/patch',
{}
)
expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')


@@ -222,30 +222,33 @@ describe('proxy', () => {
expect(_proxyConnects).toHaveLength(0)
})
// TODO mock this out so we don't rely on a third party
/*
it('HttpClient does basic https get request through proxy', async () => {
process.env['https_proxy'] = _proxyUrl
const httpClient = new httpm.HttpClient()
const res: httpm.HttpClientResponse = await httpClient.get(
'https://postman-echo.com/get'
'http://postman-echo.com/get'
)
expect(res.message.statusCode).toBe(200)
const body: string = await res.readBody()
const obj = JSON.parse(body)
expect(obj.url).toBe('https://postman-echo.com/get')
expect(obj.url).toBe('http://postman-echo.com/get')
expect(_proxyConnects).toEqual(['postman-echo.com:443'])
})
*/
it('HttpClient does basic https get request when bypass proxy', async () => {
process.env['https_proxy'] = _proxyUrl
it('HttpClient does basic http get request when bypass proxy', async () => {
process.env['http_proxy'] = _proxyUrl
process.env['no_proxy'] = 'postman-echo.com'
const httpClient = new httpm.HttpClient()
const res: httpm.HttpClientResponse = await httpClient.get(
'https://postman-echo.com/get'
'http://postman-echo.com/get'
)
expect(res.message.statusCode).toBe(200)
const body: string = await res.readBody()
const obj = JSON.parse(body)
expect(obj.url).toBe('https://postman-echo.com/get')
expect(obj.url).toBe('http://postman-echo.com/get')
expect(_proxyConnects).toHaveLength(0)
})
@@ -304,6 +307,18 @@ describe('proxy', () => {
console.log(agent)
expect(agent instanceof ProxyAgent).toBe(true)
})
it('proxyAuth is set in tunnel agent when authentication is provided with URIencoding', async () => {
process.env['https_proxy'] =
'http://user%40github.com:p%40ssword@127.0.0.1:8080'
const httpClient = new httpm.HttpClient()
const agent: any = httpClient.getAgent('https://some-url')
// eslint-disable-next-line no-console
console.log(agent)
expect(agent.proxyOptions.host).toBe('127.0.0.1')
expect(agent.proxyOptions.port).toBe('8080')
expect(agent.proxyOptions.proxyAuth).toBe('user@github.com:p@ssword')
})
})
function _clearVars(): void {


@@ -1,6 +1,6 @@
{
"name": "@actions/http-client",
"version": "2.2.1",
"version": "2.2.3",
"lockfileVersion": 2,
"requires": true,
"packages": {


@@ -1,6 +1,6 @@
{
"name": "@actions/http-client",
"version": "2.2.1",
"version": "2.2.3",
"description": "Actions Http Client",
"keywords": [
"github",


@@ -726,7 +726,9 @@ export class HttpClient {
uri: proxyUrl.href,
pipelining: !this._keepAlive ? 0 : 1,
...((proxyUrl.username || proxyUrl.password) && {
token: `${proxyUrl.username}:${proxyUrl.password}`
token: `Basic ${Buffer.from(
`${proxyUrl.username}:${proxyUrl.password}`
).toString('base64')}`
})
})
this._proxyAgentDispatcher = proxyAgent
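The effect of the fix, in isolation: undici's ProxyAgent expects token to be a complete Proxy-Authorization header value, so the credentials are now Base64-encoded with the Basic scheme rather than passed as bare user:pass. A sketch with sample credentials (in the client, proxyUrl is a DecodedURL, so username and password arrive already decoded):

    const proxyUrl = new URL('http://user%40github.com:p%40ssword@127.0.0.1:8080')
    const token = `Basic ${Buffer.from(
      `${decodeURIComponent(proxyUrl.username)}:${decodeURIComponent(proxyUrl.password)}`
    ).toString('base64')}`
    // token === 'Basic dXNlckBnaXRodWIuY29tOnBAc3N3b3Jk'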


@@ -15,10 +15,10 @@ export function getProxyUrl(reqUrl: URL): URL | undefined {
if (proxyVar) {
try {
return new URL(proxyVar)
return new DecodedURL(proxyVar)
} catch {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`)
return new DecodedURL(`http://${proxyVar}`)
}
} else {
return undefined
@@ -87,3 +87,22 @@ function isLoopbackAddress(host: string): boolean {
hostLower.startsWith('[0:0:0:0:0:0:0:1]')
)
}
class DecodedURL extends URL {
private _decodedUsername: string
private _decodedPassword: string
constructor(url: string | URL, base?: string | URL) {
super(url, base)
this._decodedUsername = decodeURIComponent(super.username)
this._decodedPassword = decodeURIComponent(super.password)
}
get username(): string {
return this._decodedUsername
}
get password(): string {
return this._decodedPassword
}
}
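Behavior sketch for DecodedURL with percent-encoded credentials (the values mirror the proxy test added above):

    const url = new DecodedURL('http://user%40github.com:p%40ssword@127.0.0.1:8080')
    url.username // 'user@github.com' (a plain URL would return 'user%40github.com')
    url.password // 'p@ssword'
    url.host     // '127.0.0.1:8080' (all other accessors are inherited unchanged)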