
Merge branch 'main' into neo-cache-service

pull/1857/head
Bassem Dghaidi 2024-09-24 02:36:02 -07:00 committed by GitHub
commit 70e5684b1f
39 changed files with 811 additions and 1218 deletions

View File

@@ -32,7 +32,7 @@ jobs:
       run: npm run bootstrap
     - name: audit tools (without allow-list)
-      run: npm audit --audit-level=moderate
+      run: npm audit --audit-level=moderate --omit dev
     - name: audit packages
       run: npm run audit-all

View File

@@ -1,5 +1,7 @@
 name: Publish NPM
+run-name: Publish NPM - ${{ github.event.inputs.package }}
+
 on:
   workflow_dispatch:
     inputs:

package-lock.json (generated)
View File

@@ -6569,12 +6569,13 @@
       }
     },
     "node_modules/axios": {
-      "version": "1.6.2",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
-      "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
+      "version": "1.7.4",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
+      "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
-        "follow-redirects": "^1.15.0",
+        "follow-redirects": "^1.15.6",
         "form-data": "^4.0.0",
         "proxy-from-env": "^1.1.0"
       }
@@ -6829,12 +6830,12 @@
       }
     },
     "node_modules/braces": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
-      "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "dependencies": {
-        "fill-range": "^7.0.1"
+        "fill-range": "^7.1.1"
       },
       "engines": {
         "node": ">=8"
@@ -8242,9 +8243,9 @@
       }
     },
     "node_modules/ejs": {
-      "version": "3.1.9",
-      "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz",
-      "integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==",
+      "version": "3.1.10",
+      "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
+      "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
       "dev": true,
       "dependencies": {
         "jake": "^10.8.5"
@@ -9316,9 +9317,9 @@
       }
     },
     "node_modules/fill-range": {
-      "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
-      "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "dependencies": {
         "to-regex-range": "^5.0.1"

View File

@@ -1,5 +1,14 @@
 # @actions/artifact Releases
 
+### 2.1.9
+
+- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
+- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)
+
+### 2.1.8
+
+- Allows `*.localhost` domains for hostname checks for local development.
+
 ### 2.1.7
 
 - Update unzip-stream dependency and reverted to using `unzip.Extract()`

View File

@@ -20,6 +20,11 @@ describe('isGhes', () => {
     expect(config.isGhes()).toBe(false)
   })
 
+  it('should return false when the request domain ends with .localhost', () => {
+    process.env.GITHUB_SERVER_URL = 'https://github.localhost'
+    expect(config.isGhes()).toBe(false)
+  })
+
   it('should return false when the request domain is specific to an enterprise', () => {
     process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
     expect(config.isGhes()).toBe(true)

View File

@@ -1,260 +1,137 @@
 import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
 import * as zip from '../src/internal/upload/zip'
 import * as util from '../src/internal/shared/util'
-import * as retention from '../src/internal/upload/retention'
 import * as config from '../src/internal/shared/config'
-import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
+import {ArtifactServiceClientJSON} from '../src/generated'
 import * as blobUpload from '../src/internal/upload/blob-upload'
 import {uploadArtifact} from '../src/internal/upload/upload-artifact'
 import {noopLogs} from './common'
 import {FilesNotFoundError} from '../src/internal/shared/errors'
-import {BlockBlobClient} from '@azure/storage-blob'
+import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import * as fs from 'fs'
 import * as path from 'path'
 
+const uploadStreamMock = jest.fn()
+const blockBlobClientMock = jest.fn().mockImplementation(() => ({
+  uploadStream: uploadStreamMock
+}))
+jest.mock('@azure/storage-blob', () => ({
+  BlobClient: jest.fn().mockImplementation(() => {
+    return {
+      getBlockBlobClient: blockBlobClientMock
+    }
+  })
+}))
+
+const fixtures = {
+  uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
+  files: [
+    ['file1.txt', 'test 1 file content'],
+    ['file2.txt', 'test 2 file content'],
+    ['file3.txt', 'test 3 file content']
+  ],
+  backendIDs: {
+    workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
+    workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
+  },
+  runtimeToken: 'test-token',
+  resultsServiceURL: 'http://results.local',
+  inputs: {
+    artifactName: 'test-artifact',
+    files: [
+      '/home/user/files/plz-upload/file1.txt',
+      '/home/user/files/plz-upload/file2.txt',
+      '/home/user/files/plz-upload/dir/file3.txt'
+    ],
+    rootDirectory: '/home/user/files/plz-upload'
+  }
+}
+
 describe('upload-artifact', () => {
+  beforeAll(() => {
+    if (!fs.existsSync(fixtures.uploadDirectory)) {
+      fs.mkdirSync(fixtures.uploadDirectory, {recursive: true})
+    }
+
+    for (const [file, content] of fixtures.files) {
+      fs.writeFileSync(path.join(fixtures.uploadDirectory, file), content)
+    }
+  })
+
   beforeEach(() => {
     noopLogs()
+    jest
+      .spyOn(uploadZipSpecification, 'validateRootDirectory')
+      .mockReturnValue()
+    jest
+      .spyOn(util, 'getBackendIdsFromToken')
+      .mockReturnValue(fixtures.backendIDs)
+    jest
+      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
+      .mockReturnValue(
+        fixtures.files.map(file => ({
+          sourcePath: path.join(fixtures.uploadDirectory, file[0]),
+          destinationPath: file[0]
+        }))
+      )
+    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
+    jest
+      .spyOn(config, 'getResultsServiceUrl')
+      .mockReturnValue(fixtures.resultsServiceURL)
   })
 
   afterEach(() => {
     jest.restoreAllMocks()
   })
 
-  it('should successfully upload an artifact', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-    jest
-      .spyOn(zip, 'createZipUploadStream')
-      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
-    jest
-      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
-      .mockReturnValue(
-        Promise.resolve({
-          ok: true,
-          signedUploadUrl: 'https://signed-upload-url.com'
-        })
-      )
-    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
-      Promise.resolve({
-        uploadSize: 1234,
-        sha256Hash: 'test-sha256-hash'
-      })
-    )
-    jest
-      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
-      .mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))
-
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
-    )
-
-    expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
-  })
-
-  it('should throw an error if the root directory is invalid', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockImplementation(() => {
-        throw new Error('Invalid root directory')
-      })
-
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
-    )
-
-    expect(uploadResp).rejects.toThrow('Invalid root directory')
-  })
-
-  it('should reject if there are no files to upload', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
+  it('should reject if there are no files to upload', async () => {
     jest
       .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
+      .mockClear()
       .mockReturnValue([])
 
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
-    expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
+    await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
   })
 
-  it('should reject if no backend IDs are found', () => {
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-    jest
-      .spyOn(zip, 'createZipUploadStream')
-      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
+  it('should reject if no backend IDs are found', async () => {
+    jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()
 
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should return false if the creation request fails', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
+  it('should return false if the creation request fails', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
 
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
    )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should return false if blob storage upload is unsuccessful', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
+  it('should return false if blob storage upload is unsuccessful', async () => {
    jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
@@ -267,57 +144,19 @@ describe('upload-artifact', () => {
       .spyOn(blobUpload, 'uploadZipToBlobStorage')
       .mockReturnValue(Promise.reject(new Error('boom')))
 
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should reject if finalize artifact fails', () => {
-    const mockDate = new Date('2020-01-01')
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: '/home/user/files/plz-upload/file1.txt',
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/file2.txt',
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
+  it('should reject if finalize artifact fails', async () => {
     jest
       .spyOn(zip, 'createZipUploadStream')
       .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
@@ -336,112 +175,113 @@ describe('upload-artifact', () => {
       .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
       .mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))
 
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
     const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow()
+    await expect(uploadResp).rejects.toThrow()
   })
 
-  it('should throw an error uploading blob chunks get delayed', async () => {
-    const mockDate = new Date('2020-01-01')
-    const dirPath = path.join(__dirname, `plz-upload`)
-    if (!fs.existsSync(dirPath)) {
-      fs.mkdirSync(dirPath, {recursive: true})
-    }
-
-    fs.writeFileSync(path.join(dirPath, 'file1.txt'), 'test file content')
-    fs.writeFileSync(path.join(dirPath, 'file2.txt'), 'test file content')
-    fs.writeFileSync(path.join(dirPath, 'file3.txt'), 'test file content')
-
-    jest
-      .spyOn(uploadZipSpecification, 'validateRootDirectory')
-      .mockReturnValue()
-    jest
-      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
-      .mockReturnValue([
-        {
-          sourcePath: path.join(dirPath, 'file1.txt'),
-          destinationPath: 'file1.txt'
-        },
-        {
-          sourcePath: path.join(dirPath, 'file2.txt'),
-          destinationPath: 'file2.txt'
-        },
-        {
-          sourcePath: path.join(dirPath, 'file3.txt'),
-          destinationPath: 'dir/file3.txt'
-        }
-      ])
-    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
-      workflowRunBackendId: '1234',
-      workflowJobRunBackendId: '5678'
-    })
-    jest
-      .spyOn(retention, 'getExpiration')
-      .mockReturnValue(Timestamp.fromDate(mockDate))
+  it('should successfully upload an artifact', async () => {
     jest
       .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
       .mockReturnValue(
         Promise.resolve({
           ok: true,
-          signedUploadUrl: 'https://signed-upload-url.com'
+          signedUploadUrl: 'https://signed-upload-url.local'
         })
       )
     jest
-      .spyOn(blobUpload, 'uploadZipToBlobStorage')
-      .mockReturnValue(Promise.reject(new Error('Upload progress stalled.')))
-
-    // ArtifactHttpClient mocks
-    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
-    jest
-      .spyOn(config, 'getResultsServiceUrl')
-      .mockReturnValue('https://test-url.com')
-
-    BlockBlobClient.prototype.uploadStream = jest
-      .fn()
-      .mockImplementation(
-        async (stream, bufferSize, maxConcurrency, options) => {
-          return new Promise<void>(resolve => {
-            // Call the onProgress callback with a progress event
-            options.onProgress({loadedBytes: 0})
-            // Wait for 31 seconds before resolving the promise
-            setTimeout(() => {
-              // Call the onProgress callback again to simulate progress
-              options.onProgress({loadedBytes: 100})
-              resolve()
-            }, 31000) // Delay longer than your timeout
+      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          artifactId: '1'
+        })
+      )
+
+    uploadStreamMock.mockImplementation(
+      async (
+        stream: NodeJS.ReadableStream,
+        bufferSize?: number,
+        maxConcurrency?: number,
+        options?: BlockBlobUploadStreamOptions
+      ) => {
+        const {onProgress, abortSignal} = options || {}
+
+        onProgress?.({loadedBytes: 0})
+
+        return new Promise(resolve => {
+          const timerId = setTimeout(() => {
+            onProgress?.({loadedBytes: 256})
+            resolve({})
+          }, 1_000)
+          abortSignal?.addEventListener('abort', () => {
+            clearTimeout(timerId)
+            resolve({})
           })
-        }
-      )
+        })
+      }
+    )
 
-    jest.mock('fs')
-    const uploadResp = uploadArtifact(
-      'test-artifact',
-      [
-        '/home/user/files/plz-upload/file1.txt',
-        '/home/user/files/plz-upload/file2.txt',
-        '/home/user/files/plz-upload/dir/file3.txt'
-      ],
-      '/home/user/files/plz-upload'
+    const {id, size} = await uploadArtifact(
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
+    )
+
+    expect(id).toBe(1)
+    expect(size).toBe(256)
+  })
+
+  it('should throw an error uploading blob chunks get delayed', async () => {
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          signedUploadUrl: 'https://signed-upload-url.local'
+        })
+      )
+    jest
+      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
+      .mockReturnValue(
+        Promise.resolve({
+          ok: true,
+          artifactId: '1'
+        })
+      )
+    jest
+      .spyOn(config, 'getResultsServiceUrl')
+      .mockReturnValue('https://results.local')
+    jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)
+
+    uploadStreamMock.mockImplementation(
+      async (
+        stream: NodeJS.ReadableStream,
+        bufferSize?: number,
+        maxConcurrency?: number,
+        options?: BlockBlobUploadStreamOptions
+      ) => {
+        const {onProgress, abortSignal} = options || {}
+
+        onProgress?.({loadedBytes: 0})
+
+        return new Promise(resolve => {
+          abortSignal?.addEventListener('abort', () => {
+            resolve({})
+          })
+        })
+      }
+    )
+
+    const uploadResp = uploadArtifact(
+      fixtures.inputs.artifactName,
+      fixtures.inputs.files,
+      fixtures.inputs.rootDirectory
     )
 
-    expect(uploadResp).rejects.toThrow('Upload progress stalled.')
+    await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
   })
 })
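The refactor above replaces the old per-test `BlockBlobClient.prototype.uploadStream = jest.fn()` monkey-patching with a module-level factory mock, so every `BlobClient` the code under test constructs hands back the same controllable `uploadStream` stub. Below is a minimal standalone sketch of that jest pattern, under the assumption of a plain ts-jest setup; the names are illustrative, not the toolkit's:

```ts
import {Readable} from 'stream'

// jest hoists jest.mock() factories above imports; out-of-scope variables
// referenced inside a factory generally must be prefixed with "mock".
const mockUploadStream = jest.fn()

jest.mock('@azure/storage-blob', () => ({
  // Every BlobClient constructed by code under test returns the same stub,
  // so each test can script uploadStream's behavior via mockUploadStream.
  BlobClient: jest.fn().mockImplementation(() => ({
    getBlockBlobClient: () => ({uploadStream: mockUploadStream})
  }))
}))

import {BlobClient} from '@azure/storage-blob'

it('routes uploads through the mocked client', async () => {
  mockUploadStream.mockResolvedValue({requestId: 'fake'})

  const client = new BlobClient('https://example.invalid/container/blob')
  await client.getBlockBlobClient().uploadStream(Readable.from(['hello']))

  expect(mockUploadStream).toHaveBeenCalledTimes(1)
})
```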

View File

@@ -1,12 +1,12 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.7",
+  "version": "2.1.9",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/artifact",
-      "version": "2.1.7",
+      "version": "2.1.9",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.7",
+  "version": "2.1.9",
   "preview": true,
   "description": "Actions artifact lib",
   "keywords": [

View File

@@ -30,10 +30,10 @@ export function isGhes(): boolean {
   const hostname = ghUrl.hostname.trimEnd().toUpperCase()
 
   const isGitHubHost = hostname === 'GITHUB.COM'
-  const isGheHost =
-    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
+  const isGheHost = hostname.endsWith('.GHE.COM')
+  const isLocalHost = hostname.endsWith('.LOCALHOST')
 
-  return !isGitHubHost && !isGheHost
+  return !isGitHubHost && !isGheHost && !isLocalHost
 }
 
 export function getGitHubWorkspaceDir(): string {
@@ -57,3 +57,7 @@ export function getConcurrency(): number {
   const concurrency = 16 * numCPUs
   return concurrency > 300 ? 300 : concurrency
 }
+
+export function getUploadChunkTimeout(): number {
+  return 300_000 // 5 minutes
+}
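In effect, `isGhes()` now treats `github.com`, `*.ghe.com`, and any `*.localhost` hostname as non-GHES, and the new `getUploadChunkTimeout()` centralizes the five-minute stall window that the blob upload code below races against. A standalone sketch of the hostname predicate, with illustrative sample URLs:

```ts
// Standalone copy of the isGhes() logic after this change, for illustration.
function isGhes(serverUrl: string): boolean {
  const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase()

  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')

  return !isGitHubHost && !isGheHost && !isLocalHost
}

console.log(isGhes('https://github.com'))        // false: hosted GitHub
console.log(isGhes('https://foo.ghe.com'))       // false: GHE cloud
console.log(isGhes('https://github.localhost'))  // false: local development (new)
console.log(isGhes('https://ghes.example.corp')) // true: self-hosted GHES
```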

View File

@@ -1,7 +1,11 @@
 import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
 import {TransferProgressEvent} from '@azure/core-http'
 import {ZipUploadStream} from './zip'
-import {getUploadChunkSize, getConcurrency} from '../shared/config'
+import {
+  getUploadChunkSize,
+  getConcurrency,
+  getUploadChunkTimeout
+} from '../shared/config'
 import * as core from '@actions/core'
 import * as crypto from 'crypto'
 import * as stream from 'stream'
@@ -25,29 +29,26 @@ export async function uploadZipToBlobStorage(
 ): Promise<BlobUploadResponse> {
   let uploadByteCount = 0
   let lastProgressTime = Date.now()
-  let timeoutId: NodeJS.Timeout | undefined
+  const abortController = new AbortController()
 
-  const chunkTimer = (timeout: number): NodeJS.Timeout => {
-    // clear the previous timeout
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
-
-    timeoutId = setTimeout(() => {
-      const now = Date.now()
-      // if there's been more than 30 seconds since the
-      // last progress event, then we'll consider the upload stalled
-      if (now - lastProgressTime > timeout) {
-        throw new Error('Upload progress stalled.')
-      }
-    }, timeout)
-    return timeoutId
-  }
+  const chunkTimer = async (interval: number): Promise<void> =>
+    new Promise((resolve, reject) => {
+      const timer = setInterval(() => {
+        if (Date.now() - lastProgressTime > interval) {
+          reject(new Error('Upload progress stalled.'))
+        }
+      }, interval)
+
+      abortController.signal.addEventListener('abort', () => {
+        clearInterval(timer)
+        resolve()
+      })
+    })
 
   const maxConcurrency = getConcurrency()
   const bufferSize = getUploadChunkSize()
   const blobClient = new BlobClient(authenticatedUploadURL)
   const blockBlobClient = blobClient.getBlockBlobClient()
-  const timeoutDuration = 300000 // 30 seconds
 
   core.debug(
     `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
@@ -56,13 +57,13 @@ export async function uploadZipToBlobStorage(
   const uploadCallback = (progress: TransferProgressEvent): void => {
     core.info(`Uploaded bytes ${progress.loadedBytes}`)
     uploadByteCount = progress.loadedBytes
-    chunkTimer(timeoutDuration)
     lastProgressTime = Date.now()
   }
 
   const options: BlockBlobUploadStreamOptions = {
     blobHTTPHeaders: {blobContentType: 'zip'},
-    onProgress: uploadCallback
+    onProgress: uploadCallback,
+    abortSignal: abortController.signal
   }
 
   let sha256Hash: string | undefined = undefined
@@ -75,24 +76,22 @@ export async function uploadZipToBlobStorage(
   core.info('Beginning upload of artifact content to blob storage')
 
   try {
-    // Start the chunk timer
-    timeoutId = chunkTimer(timeoutDuration)
-    await blockBlobClient.uploadStream(
-      uploadStream,
-      bufferSize,
-      maxConcurrency,
-      options
-    )
+    await Promise.race([
+      blockBlobClient.uploadStream(
+        uploadStream,
+        bufferSize,
+        maxConcurrency,
+        options
+      ),
+      chunkTimer(getUploadChunkTimeout())
+    ])
   } catch (error) {
     if (NetworkError.isNetworkErrorCode(error?.code)) {
       throw new NetworkError(error?.code)
     }
     throw error
   } finally {
-    // clear the timeout whether or not the upload completes
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
+    abortController.abort()
  }
 
   core.info('Finished uploading artifact content to blob storage!')
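The fix races the upload against a watchdog: `chunkTimer` rejects if no progress callback has landed within the interval, and the `finally` abort both cancels the Azure upload (via `abortSignal`) and resolves the timer promise so its interval can't leak. The old version threw from inside a `setTimeout` callback, where no caller could catch the error. A self-contained sketch of the pattern, with a fake task standing in for `uploadStream` (all names illustrative):

```ts
// Races a cancelable task against a stall watchdog, as in the diff above.
async function withStallWatchdog<T>(
  task: (signal: AbortSignal) => Promise<T>,
  intervalMs: number,
  progress: {last: number}
): Promise<T> {
  const abortController = new AbortController()

  // Rejects once no progress has been reported for a full interval; resolves
  // quietly on abort so the interval never outlives the task.
  const watchdog = new Promise<never>((resolve, reject) => {
    const timer = setInterval(() => {
      if (Date.now() - progress.last > intervalMs) {
        reject(new Error('Upload progress stalled.'))
      }
    }, intervalMs)
    abortController.signal.addEventListener('abort', () => {
      clearInterval(timer)
      resolve(undefined as never)
    })
  })

  try {
    return await Promise.race([task(abortController.signal), watchdog])
  } finally {
    // Cancels the task if the watchdog fired; stops the watchdog otherwise.
    abortController.abort()
  }
}

// Usage: the task keeps the watchdog fed by bumping `progress.last`,
// mirroring how onProgress updates lastProgressTime above.
const progress = {last: Date.now()}
withStallWatchdog(async () => 'done', 300_000, progress).then(console.log)
```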

View File

@@ -1,7 +1,6 @@
 import * as stream from 'stream'
 import * as archiver from 'archiver'
 import * as core from '@actions/core'
-import {createReadStream} from 'fs'
 import {UploadZipSpecification} from './upload-zip-specification'
 import {getUploadChunkSize} from '../shared/config'
 
@@ -44,7 +43,7 @@ export async function createZipUploadStream(
   for (const file of uploadSpecification) {
     if (file.sourcePath !== null) {
       // Add a normal file to the zip
-      zip.append(createReadStream(file.sourcePath), {
+      zip.file(file.sourcePath, {
         name: file.destinationPath
       })
     } else {
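This is the "lazy stream" fix from the 2.1.9 release notes (#1771): `createReadStream` opens a file descriptor as soon as it is called, so appending thousands of streams up front can exhaust the process's open-file limit, while `archiver`'s `file()` records just the path and opens each file only when the archive actually reaches that entry. A small sketch of the difference (paths illustrative):

```ts
import * as archiver from 'archiver'
import {createReadStream} from 'fs'

const zip = archiver.create('zip')

// Eager: the descriptor is open from this call until the entry is flushed,
// so queueing N entries can hold N descriptors open at once.
zip.append(createReadStream('/data/file1.txt'), {name: 'file1.txt'})

// Lazy: only the path is stored; archiver opens and closes the file when
// the archive stream actually reaches this entry, keeping fds bounded.
zip.file('/data/file2.txt', {name: 'file2.txt'})

zip.finalize()
```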

View File

@@ -63,6 +63,8 @@ export type AttestOptions = {
   // Sigstore instance to use for signing. Must be one of "public-good" or
   // "github".
   sigstore?: 'public-good' | 'github'
+  // HTTP headers to include in request to attestations API.
+  headers?: {[header: string]: string | number | undefined}
   // Whether to skip writing the attestation to the GH attestations API.
   skipWrite?: boolean
 }
@@ -113,6 +115,8 @@ export type AttestProvenanceOptions = {
   // Sigstore instance to use for signing. Must be one of "public-good" or
   // "github".
   sigstore?: 'public-good' | 'github'
+  // HTTP headers to include in request to attestations API.
+  headers?: {[header: string]: string | number | undefined}
   // Whether to skip writing the attestation to the GH attestations API.
   skipWrite?: boolean
   // Issuer URL responsible for minting the OIDC token from which the

View File

@@ -1,19 +1,42 @@
 # @actions/attest Releases
 
+### 1.4.2
+
+- Fix bug in `buildSLSAProvenancePredicate`/`attestProvenance` when generating provenance statement for enterprise account using customized OIDC issuer value [#1823](https://github.com/actions/toolkit/pull/1823)
+
+### 1.4.1
+
+- Bump @actions/http-client from 2.2.1 to 2.2.3 [#1805](https://github.com/actions/toolkit/pull/1805)
+
+### 1.4.0
+
+- Add new `headers` parameter to the `attest` and `attestProvenance` functions [#1790](https://github.com/actions/toolkit/pull/1790)
+- Update `buildSLSAProvenancePredicate`/`attestProvenance` to automatically derive default OIDC issuer URL from current execution context [#1796](https://github.com/actions/toolkit/pull/1796)
+
+### 1.3.1
+
+- Fix bug with proxy support when retrieving JWKS for OIDC issuer [#1776](https://github.com/actions/toolkit/pull/1776)
+
+### 1.3.0
+
+- Dynamic construction of Sigstore API URLs [#1735](https://github.com/actions/toolkit/pull/1735)
+- Switch to new GH provenance build type [#1745](https://github.com/actions/toolkit/pull/1745)
+- Fetch existing Rekor entry on 409 conflict error [#1759](https://github.com/actions/toolkit/pull/1759)
+- Bump @sigstore/bundle from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
+- Bump @sigstore/sign from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
+
 ### 1.2.1
 
-- Retry request on attestation persistence failure
+- Retry request on attestation persistence failure [#1725](https://github.com/actions/toolkit/pull/1725)
 
 ### 1.2.0
 
-- Generate attestations using the v0.3 Sigstore bundle format.
-- Bump @sigstore/bundle from 2.2.0 to 2.3.0.
-- Bump @sigstore/sign from 2.2.3 to 2.3.0.
-- Remove dependency on make-fetch-happen
+- Generate attestations using the v0.3 Sigstore bundle format [#1701](https://github.com/actions/toolkit/pull/1701)
+- Bump @sigstore/bundle from 2.2.0 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
+- Bump @sigstore/sign from 2.2.3 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
+- Remove dependency on make-fetch-happen [#1714](https://github.com/actions/toolkit/pull/1714)
 
 ### 1.1.0
 
-- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement.
+- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement [#1693](https://github.com/actions/toolkit/pull/1693)
 
 ### 1.0.0

View File

@@ -4,12 +4,12 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
 {
   "params": {
     "buildDefinition": {
-      "buildType": "https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1",
+      "buildType": "https://actions.github.io/buildtypes/workflow/v1",
       "externalParameters": {
         "workflow": {
           "path": ".github/workflows/main.yml",
           "ref": "main",
-          "repository": "https://github.com/owner/repo",
+          "repository": "https://foo.ghe.com/owner/repo",
         },
       },
       "internalParameters": {
@@ -17,6 +17,7 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
         "event_name": "push",
         "repository_id": "repo-id",
         "repository_owner_id": "owner-id",
+        "runner_environment": "github-hosted",
       },
     },
     "resolvedDependencies": [
@@ -24,16 +25,16 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
         "digest": {
           "gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
         },
-        "uri": "git+https://github.com/owner/repo@refs/heads/main",
+        "uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
       },
     ],
   },
   "runDetails": {
     "builder": {
-      "id": "https://github.com/actions/runner/github-hosted",
+      "id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
     },
     "metadata": {
-      "invocationId": "https://github.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
+      "invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
     },
   },
 },

View File

@@ -45,7 +45,8 @@ describe('getIDTokenClaims', () => {
       sha: 'sha',
       repository: 'repo',
       event_name: 'push',
-      workflow_ref: 'main',
+      job_workflow_ref: 'job_workflow_ref',
+      workflow_ref: 'workflow',
       repository_id: '1',
       repository_owner_id: '1',
       runner_environment: 'github-hosted',
@@ -67,6 +68,55 @@ describe('getIDTokenClaims', () => {
     })
   })
 
+  describe('when ID token is valid (w/ enterprise slug)', () => {
+    const claims = {
+      iss: `${issuer}/foo-bar`,
+      aud: audience,
+      ref: 'ref',
+      sha: 'sha',
+      repository: 'repo',
+      event_name: 'push',
+      job_workflow_ref: 'job_workflow_ref',
+      workflow_ref: 'workflow',
+      repository_id: '1',
+      repository_owner_id: '1',
+      runner_environment: 'github-hosted',
+      run_id: '1',
+      run_attempt: '1'
+    }
+
+    beforeEach(async () => {
+      const jwt = await new jose.SignJWT(claims)
+        .setProtectedHeader({alg: 'PS256'})
+        .sign(key.privateKey)
+
+      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
+    })
+
+    it('returns the ID token claims', async () => {
+      const result = await getIDTokenClaims(issuer)
+      expect(result).toEqual(claims)
+    })
+  })
+
+  describe('when ID token is missing the "iss" claim', () => {
+    const claims = {
+      aud: audience
+    }
+
+    beforeEach(async () => {
+      const jwt = await new jose.SignJWT(claims)
+        .setProtectedHeader({alg: 'PS256'})
+        .sign(key.privateKey)
+
+      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
+    })
+
+    it('throws an error', async () => {
+      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing "iss"/i)
+    })
+  })
+
   describe('when ID token is missing required claims', () => {
     const claims = {
       iss: issuer,
@@ -98,7 +148,9 @@ describe('getIDTokenClaims', () => {
     })
 
     it('throws an error', async () => {
-      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/issuer invalid/)
+      await expect(getIDTokenClaims(issuer)).rejects.toThrow(
+        /unexpected "iss"/i
+      )
     })
   })
 
@@ -114,7 +166,7 @@ describe('getIDTokenClaims', () => {
   })
 
   it('throw an error', async () => {
-    await expect(getIDTokenClaims(issuer)).rejects.toThrow(/audience invalid/)
+    await expect(getIDTokenClaims(issuer)).rejects.toThrow(/unexpected "aud"/)
   })
 })

View File

@@ -8,7 +8,7 @@ import {attestProvenance, buildSLSAProvenancePredicate} from '../src/provenance'
 describe('provenance functions', () => {
   const originalEnv = process.env
-  const issuer = 'https://example.com'
+  const issuer = 'https://token.actions.foo.ghe.com'
   const audience = 'nobody'
   const jwksPath = '/.well-known/jwks.json'
   const tokenPath = '/token'
@@ -23,6 +23,7 @@ describe('provenance functions', () => {
     repository: 'owner/repo',
     ref: 'refs/heads/main',
     sha: 'babca52ab0c93ae16539e5923cb0d7403b9a093b',
+    job_workflow_ref: 'owner/workflows/.github/workflows/publish.yml@main',
     workflow_ref: 'owner/repo/.github/workflows/main.yml@main',
     event_name: 'push',
     repository_id: 'repo-id',
@@ -37,7 +38,7 @@ describe('provenance functions', () => {
       ...originalEnv,
       ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
       ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
-      GITHUB_SERVER_URL: 'https://github.com',
+      GITHUB_SERVER_URL: 'https://foo.ghe.com',
       GITHUB_REPOSITORY: claims.repository
     }
 
@@ -67,7 +68,7 @@ describe('provenance functions', () => {
   describe('buildSLSAProvenancePredicate', () => {
     it('returns a provenance hydrated from an OIDC token', async () => {
-      const predicate = await buildSLSAProvenancePredicate(issuer)
+      const predicate = await buildSLSAProvenancePredicate()
       expect(predicate).toMatchSnapshot()
     })
   })
@@ -95,9 +96,9 @@ describe('provenance functions', () => {
     })
 
     describe('when using the github Sigstore instance', () => {
-      beforeEach(async () => {
-        const {fulcioURL, tsaServerURL} = signingEndpoints('github')
+      const {fulcioURL, tsaServerURL} = signingEndpoints('github')
 
+      beforeEach(async () => {
         // Mock Sigstore
         await mockFulcio({baseURL: fulcioURL, strict: false})
         await mockTSA({baseURL: tsaServerURL})
@@ -117,8 +118,7 @@ describe('provenance functions', () => {
           subjectName,
           subjectDigest,
           token: 'token',
-          sigstore: 'github',
-          issuer
+          sigstore: 'github'
         })
 
         expect(attestation).toBeDefined()
@@ -145,8 +145,7 @@ describe('provenance functions', () => {
       const attestation = await attestProvenance({
         subjectName,
         subjectDigest,
-        token: 'token',
-        issuer
+        token: 'token'
       })
 
       expect(attestation).toBeDefined()
@@ -182,8 +181,7 @@ describe('provenance functions', () => {
         subjectName,
         subjectDigest,
         token: 'token',
-        sigstore: 'public-good',
-        issuer
+        sigstore: 'public-good'
       })
 
       expect(attestation).toBeDefined()
@@ -210,8 +208,7 @@ describe('provenance functions', () => {
       const attestation = await attestProvenance({
         subjectName,
         subjectDigest,
-        token: 'token',
-        issuer
+        token: 'token'
       })
 
      expect(attestation).toBeDefined()
@@ -237,8 +234,7 @@ describe('provenance functions', () => {
         subjectDigest,
         token: 'token',
         sigstore: 'public-good',
-        skipWrite: true,
-        issuer
+        skipWrite: true
       })
 
       expect(attestation).toBeDefined()

View File

@@ -5,6 +5,7 @@ describe('writeAttestation', () => {
   const originalEnv = process.env
   const attestation = {foo: 'bar '}
   const token = 'token'
+  const headers = {'X-GitHub-Foo': 'true'}
 
   const mockAgent = new MockAgent()
   setGlobalDispatcher(mockAgent)
@@ -27,14 +28,16 @@ describe('writeAttestation', () => {
       .intercept({
         path: '/repos/foo/bar/attestations',
         method: 'POST',
-        headers: {authorization: `token ${token}`},
+        headers: {authorization: `token ${token}`, ...headers},
         body: JSON.stringify({bundle: attestation})
       })
       .reply(201, {id: '123'})
   })
 
   it('persists the attestation', async () => {
-    await expect(writeAttestation(attestation, token)).resolves.toEqual('123')
+    await expect(
+      writeAttestation(attestation, token, {headers})
+    ).resolves.toEqual('123')
   })
 })
}) })

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/attest",
-  "version": "1.2.1",
+  "version": "1.4.2",
   "description": "Actions attestation lib",
   "keywords": [
     "github",
@@ -35,22 +35,20 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "devDependencies": {
-    "@sigstore/mock": "^0.6.5",
+    "@sigstore/mock": "^0.7.4",
     "@sigstore/rekor-types": "^2.0.0",
     "@types/jsonwebtoken": "^9.0.6",
-    "jose": "^5.2.3",
     "nock": "^13.5.1",
     "undici": "^5.28.4"
   },
   "dependencies": {
     "@actions/core": "^1.10.1",
     "@actions/github": "^6.0.0",
-    "@actions/http-client": "^2.2.1",
+    "@actions/http-client": "^2.2.3",
     "@octokit/plugin-retry": "^6.0.1",
-    "@sigstore/bundle": "^2.3.0",
-    "@sigstore/sign": "^2.3.0",
-    "jsonwebtoken": "^9.0.2",
-    "jwks-rsa": "^3.1.0"
+    "@sigstore/bundle": "^2.3.2",
+    "@sigstore/sign": "^2.3.2",
+    "jose": "^5.2.3"
   },
   "overrides": {
     "@octokit/plugin-retry": {

View File

@@ -28,6 +28,8 @@ export type AttestOptions = {
   // Sigstore instance to use for signing. Must be one of "public-good" or
   // "github".
   sigstore?: SigstoreInstance
+  // HTTP headers to include in request to attestations API.
+  headers?: {[header: string]: string | number | undefined}
   // Whether to skip writing the attestation to the GH attestations API.
   skipWrite?: boolean
 }
@@ -61,7 +63,11 @@ export async function attest(options: AttestOptions): Promise<Attestation> {
   // Store the attestation
   let attestationID: string | undefined
   if (options.skipWrite !== true) {
-    attestationID = await writeAttestation(bundleToJSON(bundle), options.token)
+    attestationID = await writeAttestation(
+      bundleToJSON(bundle),
+      options.token,
+      {headers: options.headers}
+    )
   }
 
   return toAttestation(bundle, attestationID)
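A hedged usage sketch of the new option: the headers ride along unchanged to the `POST /repos/{owner}/{repo}/attestations` call. The subject, predicate, and header values below are placeholders for illustration, not values from this PR:

```ts
import {attest} from '@actions/attest'

async function run(): Promise<void> {
  const attestation = await attest({
    subjectName: 'my-artifact',                       // placeholder
    subjectDigest: {sha256: '0123abcd'.repeat(8)},    // placeholder digest
    predicateType: 'https://slsa.dev/provenance/v1',
    predicate: {buildDefinition: {}, runDetails: {}}, // placeholder predicate
    token: process.env.GITHUB_TOKEN ?? '',
    // New in 1.4.0: forwarded to the attestations API request.
    headers: {'X-GitHub-Api-Version': '2022-11-28'}
  })
  console.log(attestation.attestationID)
}
```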

View File

@@ -1,16 +1,21 @@
 import {getIDToken} from '@actions/core'
 import {HttpClient} from '@actions/http-client'
-import * as jwt from 'jsonwebtoken'
-import jwks from 'jwks-rsa'
+import * as jose from 'jose'
 
 const OIDC_AUDIENCE = 'nobody'
 
+const VALID_SERVER_URLS = [
+  'https://github.com',
+  new RegExp('^https://[a-z0-9-]+\\.ghe\\.com$')
+] as const
+
 const REQUIRED_CLAIMS = [
   'iss',
   'ref',
   'sha',
   'repository',
   'event_name',
+  'job_workflow_ref',
   'workflow_ref',
   'repository_id',
   'repository_owner_id',
@@ -25,7 +30,8 @@ type OIDCConfig = {
   jwks_uri: string
 }
 
-export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
+export const getIDTokenClaims = async (issuer?: string): Promise<ClaimSet> => {
+  issuer = issuer || getIssuer()
+
   try {
     const token = await getIDToken(OIDC_AUDIENCE)
     const claims = await decodeOIDCToken(token, issuer)
@@ -39,55 +45,46 @@ export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
 const decodeOIDCToken = async (
   token: string,
   issuer: string
-): Promise<jwt.JwtPayload> => {
+): Promise<jose.JWTPayload> => {
   // Verify and decode token
-  return new Promise((resolve, reject) => {
-    jwt.verify(
-      token,
-      getPublicKey(issuer),
-      {audience: OIDC_AUDIENCE, issuer},
-      (err, decoded) => {
-        if (err) {
-          reject(err)
-        } else if (!decoded || typeof decoded === 'string') {
-          reject(new Error('No decoded token'))
-        } else {
-          resolve(decoded)
-        }
-      }
-    )
-  })
-}
+  const jwks = jose.createLocalJWKSet(await getJWKS(issuer))
+  const {payload} = await jose.jwtVerify(token, jwks, {
+    audience: OIDC_AUDIENCE
+  })
+
+  if (!payload.iss) {
+    throw new Error('Missing "iss" claim')
+  }
+
+  // Check that the issuer STARTS WITH the expected issuer URL to account for
+  // the fact that the value may include an enterprise-specific slug
+  if (!payload.iss.startsWith(issuer)) {
+    throw new Error(`Unexpected "iss" claim: ${payload.iss}`)
+  }
+
+  return payload
+}
 
-// Returns a callback to locate the public key for the given JWT header. This
-// involves two calls:
-//  1. Fetch the OpenID configuration to get the JWKS URI.
-//  2. Fetch the public key from the JWKS URI.
-const getPublicKey =
-  (issuer: string): jwt.GetPublicKeyOrSecret =>
-  (header: jwt.JwtHeader, callback: jwt.SigningKeyCallback) => {
-    // Look up the JWKS URI from the issuer's OpenID configuration
-    new HttpClient('actions/attest')
-      .getJson<OIDCConfig>(`${issuer}/.well-known/openid-configuration`)
-      .then(data => {
-        if (!data.result) {
-          callback(new Error('No OpenID configuration found'))
-        } else {
-          // Fetch the public key from the JWKS URI
-          jwks({jwksUri: data.result.jwks_uri}).getSigningKey(
-            header.kid,
-            (err, key) => {
-              callback(err, key?.getPublicKey())
-            }
-          )
-        }
-      })
-      .catch(err => {
-        callback(err)
-      })
-  }
+const getJWKS = async (issuer: string): Promise<jose.JSONWebKeySet> => {
+  const client = new HttpClient('@actions/attest')
+  const config = await client.getJson<OIDCConfig>(
+    `${issuer}/.well-known/openid-configuration`
+  )
+
+  if (!config.result) {
+    throw new Error('No OpenID configuration found')
+  }
+
+  const jwks = await client.getJson<jose.JSONWebKeySet>(config.result.jwks_uri)
+
+  if (!jwks.result) {
+    throw new Error('No JWKS found for issuer')
+  }
+
+  return jwks.result
+}
 
-function assertClaimSet(claims: jwt.JwtPayload): asserts claims is ClaimSet {
+function assertClaimSet(claims: jose.JWTPayload): asserts claims is ClaimSet {
   const missingClaims: string[] = []
 
   for (const claim of REQUIRED_CLAIMS) {
@@ -100,3 +97,21 @@ function assertClaimSet(claims: jose.JWTPayload): asserts claims is ClaimSet {
     throw new Error(`Missing claims: ${missingClaims.join(', ')}`)
   }
 }
+
+// Derive the current OIDC issuer based on the server URL
+function getIssuer(): string {
+  const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
+
+  // Ensure the server URL is a valid GitHub server URL
+  if (!VALID_SERVER_URLS.some(valid_url => serverURL.match(valid_url))) {
+    throw new Error(`Invalid server URL: ${serverURL}`)
+  }
+
+  let host = new URL(serverURL).hostname
+
+  if (host === 'github.com') {
+    host = 'githubusercontent.com'
+  }
+
+  return `https://token.actions.${host}`
+}
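The net effect: instead of a hard-coded `https://token.actions.githubusercontent.com`, the expected issuer is derived from `GITHUB_SERVER_URL`, and the `startsWith` check tolerates the enterprise slug some GHE tokens append to the issuer (e.g. `.../foo-bar`, as exercised in the test above). A quick sketch of the mapping, as a standalone copy of the derivation with illustrative sample URLs:

```ts
// Standalone copy of the issuer derivation above, for illustration.
function deriveIssuer(serverURL: string): string {
  let host = new URL(serverURL).hostname
  if (host === 'github.com') {
    host = 'githubusercontent.com'
  }
  return `https://token.actions.${host}`
}

console.log(deriveIssuer('https://github.com'))  // https://token.actions.githubusercontent.com
console.log(deriveIssuer('https://foo.ghe.com')) // https://token.actions.foo.ghe.com
```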

View File

@@ -3,12 +3,7 @@ import {getIDTokenClaims} from './oidc'
 import type {Attestation, Predicate} from './shared.types'
 
 const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1'
-
-const GITHUB_BUILDER_ID_PREFIX = 'https://github.com/actions/runner'
-const GITHUB_BUILD_TYPE =
-  'https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1'
-
-const DEFAULT_ISSUER = 'https://token.actions.githubusercontent.com'
+const GITHUB_BUILD_TYPE = 'https://actions.github.io/buildtypes/workflow/v1'
 
 export type AttestProvenanceOptions = Omit<
   AttestOptions,
@@ -27,7 +22,7 @@ export type AttestProvenanceOptions = Omit<
  * @returns The SLSA provenance predicate.
  */
 export const buildSLSAProvenancePredicate = async (
-  issuer: string = DEFAULT_ISSUER
+  issuer?: string
 ): Promise<Predicate> => {
   const serverURL = process.env.GITHUB_SERVER_URL
   const claims = await getIDTokenClaims(issuer)
@@ -55,7 +50,8 @@ export const buildSLSAProvenancePredicate = async (
       github: {
         event_name: claims.event_name,
         repository_id: claims.repository_id,
-        repository_owner_id: claims.repository_owner_id
+        repository_owner_id: claims.repository_owner_id,
+        runner_environment: claims.runner_environment
       }
     },
     resolvedDependencies: [
@@ -69,7 +65,7 @@ export const buildSLSAProvenancePredicate = async (
     },
     runDetails: {
       builder: {
-        id: `${GITHUB_BUILDER_ID_PREFIX}/${claims.runner_environment}`
+        id: `${serverURL}/${claims.job_workflow_ref}`
       },
       metadata: {
         invocationId: `${serverURL}/${claims.repository}/actions/runs/${claims.run_id}/attempts/${claims.run_attempt}`

View File

@@ -87,6 +87,7 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
       new RekorWitness({
         rekorBaseURL: opts.rekorURL,
         entryType: 'dsse',
+        fetchOnConflict: true,
         timeout,
         retry
       })

View File

@@ -1,11 +1,13 @@
 import * as github from '@actions/github'
 import {retry} from '@octokit/plugin-retry'
+import {RequestHeaders} from '@octokit/types'
 
 const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
 const DEFAULT_RETRY_COUNT = 5
 
 export type WriteOptions = {
   retry?: number
+  headers?: RequestHeaders
 }
 
 /**
  * Writes an attestation to the repository's attestations endpoint.
@@ -26,6 +28,7 @@ export const writeAttestation = async (
   const response = await octokit.request(CREATE_ATTESTATION_REQUEST, {
     owner: github.context.repo.owner,
     repo: github.context.repo.repo,
+    headers: options.headers,
     data: {bundle: attestation}
   })

View File

@@ -1,5 +1,8 @@
 # @actions/glob Releases
 
+### 0.5.0
+
+- Added `excludeHiddenFiles` option, which is disabled by default to preserve existing behavior [#1791: Add glob option to ignore hidden files](https://github.com/actions/toolkit/pull/1791)
+
 ### 0.4.0
 
 - Pass in the current workspace as a parameter to HashFiles [#1318](https://github.com/actions/toolkit/pull/1318)

View File

@@ -708,7 +708,7 @@ describe('globber', () => {
     expect(itemPaths).toEqual([])
   })
 
-  it('returns hidden files', async () => {
+  it('returns hidden files by default', async () => {
     // Create the following layout:
     //   <root>
     //   <root>/.emptyFolder
@@ -734,6 +734,26 @@ describe('globber', () => {
     ])
   })
 
+  it('ignores hidden files when excludeHiddenFiles is set', async () => {
+    // Create the following layout:
+    //   <root>
+    //   <root>/.emptyFolder
+    //   <root>/.file
+    //   <root>/.folder
+    //   <root>/.folder/file
+    const root = path.join(getTestTemp(), 'ignores-hidden-files')
+    await createHiddenDirectory(path.join(root, '.emptyFolder'))
+    await createHiddenDirectory(path.join(root, '.folder'))
+    await createHiddenFile(path.join(root, '.file'), 'test .file content')
+    await fs.writeFile(
+      path.join(root, '.folder', 'file'),
+      'test .folder/file content'
+    )
+
+    const itemPaths = await glob(root, {excludeHiddenFiles: true})
+    expect(itemPaths).toEqual([root])
+  })
+
   it('returns normalized paths', async () => {
     // Create the following layout:
     //   <root>/hello/world.txt

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/glob",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "lockfileVersion": 3,
   "requires": true,
   "description": "Actions glob lib",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/glob",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "preview": true,
   "description": "Actions glob lib",
   "keywords": [

View File

@@ -9,7 +9,8 @@ export function getOptions(copy?: GlobOptions): GlobOptions {
     followSymbolicLinks: true,
     implicitDescendants: true,
     matchDirectories: true,
-    omitBrokenSymbolicLinks: true
+    omitBrokenSymbolicLinks: true,
+    excludeHiddenFiles: false
   }
 
   if (copy) {
@@ -32,6 +33,11 @@ export function getOptions(copy?: GlobOptions): GlobOptions {
       result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks
       core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`)
     }
+
+    if (typeof copy.excludeHiddenFiles === 'boolean') {
+      result.excludeHiddenFiles = copy.excludeHiddenFiles
+      core.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`)
+    }
   }
 
   return result


@@ -36,4 +36,13 @@ export interface GlobOptions {
    * @default true
    */
   omitBrokenSymbolicLinks?: boolean
+
+  /**
+   * Indicates whether to exclude hidden files (files and directories starting with a `.`).
+   * This does not apply to Windows files and directories with the hidden attribute unless
+   * they are also prefixed with a `.`.
+   *
+   * @default false
+   */
+  excludeHiddenFiles?: boolean
 }
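Taken together with the `excludeHiddenFiles: false` default above, a minimal consumer-side sketch via the package's existing `create` entry point (the `'**'` pattern is a placeholder):

```typescript
import * as glob from '@actions/glob'

async function listVisibleFiles(): Promise<string[]> {
  // excludeHiddenFiles defaults to false, so dotfiles keep being returned
  // unless the caller opts in to skipping them, as here.
  const globber = await glob.create('**', {excludeHiddenFiles: true})
  return await globber.glob()
}
```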


@@ -128,6 +128,11 @@ export class DefaultGlobber implements Globber {
         continue
       }
 
+      // Hidden file or directory?
+      if (options.excludeHiddenFiles && path.basename(item.path).match(/^\./)) {
+        continue
+      }
+
       // Directory
       if (stats.isDirectory()) {
         // Matched
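Because the test runs against `path.basename(item.path)` for every item the globber visits, a hidden directory is skipped when it is itself traversed, and nothing beneath it is ever reached. A standalone illustration of the predicate (not part of the diff):

```typescript
import * as path from 'path'

// Mirrors the check above: does the final path segment start with a dot?
const isHidden = (p: string): boolean => /^\./.test(path.basename(p))

console.log(isHidden('/repo/.env'))        // true  – the file itself is hidden
console.log(isHidden('/repo/.git'))        // true  – skipping the directory prunes its contents
console.log(isHidden('/repo/.git/config')) // false – only the basename is tested,
                                           // which is why pruning happens at '.git' itself
console.log(isHidden('/repo/src/main.ts')) // false
```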


@@ -1,5 +1,14 @@
 ## Releases
 
+## 2.2.3
+- Fixed an issue where proxy username and password were not handled correctly [#1799](https://github.com/actions/toolkit/pull/1799)
+
+## 2.2.2
+- Better handling of url encoded usernames and passwords in proxy config [#1782](https://github.com/actions/toolkit/pull/1782)
+
+## 2.2.1
+- Make sure RequestOptions.keepAlive is applied properly on node20 runtime [#1572](https://github.com/actions/toolkit/pull/1572)
+
 ## 2.2.0
 - Add function to return proxy agent dispatcher for compatibility with latest octokit packages [#1547](https://github.com/actions/toolkit/pull/1547)


@@ -37,7 +37,7 @@ describe('basics', () => {
   //     "user-agent": "typed-test-client-tests"
   //   },
   //   "origin": "173.95.152.44",
-  //   "url": "https://postman-echo.com/get"
+  //   "url": "http://postman-echo.com/get"
   // }
 
   it('does basic http get request', async () => {
@@ -63,16 +63,17 @@ describe('basics', () => {
     expect(obj.headers['user-agent']).toBeFalsy()
   })
 
+  /* TODO write a mock rather then relying on a third party
   it('does basic https get request', async () => {
     const res: httpm.HttpClientResponse = await _http.get(
-      'https://postman-echo.com/get'
+      'http://postman-echo.com/get'
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
   })
+  */
 
   it('does basic http get request with default headers', async () => {
     const http: httpm.HttpClient = new httpm.HttpClient(
       'http-client-tests',
@@ -125,12 +126,12 @@ describe('basics', () => {
   it('pipes a get request', async () => {
     return new Promise<void>(async resolve => {
       const file = fs.createWriteStream(sampleFilePath)
-      ;(await _http.get('https://postman-echo.com/get')).message
+      ;(await _http.get('http://postman-echo.com/get')).message
         .pipe(file)
         .on('close', () => {
           const body: string = fs.readFileSync(sampleFilePath).toString()
           const obj = JSON.parse(body)
-          expect(obj.url).toBe('https://postman-echo.com/get')
+          expect(obj.url).toBe('http://postman-echo.com/get')
           resolve()
         })
     })
@@ -138,32 +139,32 @@ describe('basics', () => {
   it('does basic get request with redirects', async () => {
     const res: httpm.HttpClientResponse = await _http.get(
-      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
-        'https://postman-echo.com/get'
+      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
+        'http://postman-echo.com/get'
       )}`
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
   })
 
   it('does basic get request with redirects (303)', async () => {
     const res: httpm.HttpClientResponse = await _http.get(
-      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
-        'https://postman-echo.com/get'
+      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
+        'http://postman-echo.com/get'
       )}&status_code=303`
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
   })
 
   it('returns 404 for not found get request on redirect', async () => {
     const res: httpm.HttpClientResponse = await _http.get(
-      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
-        'https://postman-echo.com/status/404'
+      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
+        'http://postman-echo.com/status/404'
      )}&status_code=303`
    )
    expect(res.message.statusCode).toBe(404)
@@ -177,8 +178,8 @@ describe('basics', () => {
       {allowRedirects: false}
     )
     const res: httpm.HttpClientResponse = await http.get(
-      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
-        'https://postman-echo.com/get'
+      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
+        'http://postman-echo.com/get'
       )}`
     )
     expect(res.message.statusCode).toBe(302)
@@ -191,8 +192,8 @@ describe('basics', () => {
       authorization: 'shhh'
     }
     const res: httpm.HttpClientResponse = await _http.get(
-      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
-        'https://www.postman-echo.com/get'
+      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
+        'http://www.postman-echo.com/get'
       )}`,
       headers
     )
@@ -204,7 +205,7 @@ describe('basics', () => {
     expect(obj.headers[httpm.Headers.Accept]).toBe('application/json')
     expect(obj.headers['Authorization']).toBeUndefined()
     expect(obj.headers['authorization']).toBeUndefined()
-    expect(obj.url).toBe('https://www.postman-echo.com/get')
+    expect(obj.url).toBe('http://www.postman-echo.com/get')
   })
 
   it('does not pass Auth with diff hostname redirects', async () => {
@@ -213,8 +214,8 @@ describe('basics', () => {
       Authorization: 'shhh'
     }
     const res: httpm.HttpClientResponse = await _http.get(
-      `https://postman-echo.com/redirect-to?url=${encodeURIComponent(
-        'https://www.postman-echo.com/get'
+      `http://postman-echo.com/redirect-to?url=${encodeURIComponent(
+        'http://www.postman-echo.com/get'
       )}`,
       headers
     )
@@ -226,7 +227,7 @@ describe('basics', () => {
     expect(obj.headers[httpm.Headers.Accept]).toBe('application/json')
     expect(obj.headers['Authorization']).toBeUndefined()
     expect(obj.headers['authorization']).toBeUndefined()
-    expect(obj.url).toBe('https://www.postman-echo.com/get')
+    expect(obj.url).toBe('http://www.postman-echo.com/get')
   })
 
   it('does basic head request', async () => {
@@ -289,11 +290,11 @@ describe('basics', () => {
   it('gets a json object', async () => {
     const jsonObj = await _http.getJson<HttpBinData>(
-      'https://postman-echo.com/get'
+      'http://postman-echo.com/get'
     )
     expect(jsonObj.statusCode).toBe(200)
     expect(jsonObj.result).toBeDefined()
-    expect(jsonObj.result?.url).toBe('https://postman-echo.com/get')
+    expect(jsonObj.result?.url).toBe('http://postman-echo.com/get')
     expect(jsonObj.result?.headers[httpm.Headers.Accept]).toBe(
       httpm.MediaTypes.ApplicationJson
     )
@@ -304,7 +305,7 @@ describe('basics', () => {
   it('getting a non existent json object returns null', async () => {
     const jsonObj = await _http.getJson<HttpBinData>(
-      'https://postman-echo.com/status/404'
+      'http://postman-echo.com/status/404'
     )
     expect(jsonObj.statusCode).toBe(404)
     expect(jsonObj.result).toBeNull()
@@ -313,12 +314,12 @@ describe('basics', () => {
   it('posts a json object', async () => {
     const res = {name: 'foo'}
     const restRes = await _http.postJson<HttpBinData>(
-      'https://postman-echo.com/post',
+      'http://postman-echo.com/post',
       res
     )
     expect(restRes.statusCode).toBe(200)
     expect(restRes.result).toBeDefined()
-    expect(restRes.result?.url).toBe('https://postman-echo.com/post')
+    expect(restRes.result?.url).toBe('http://postman-echo.com/post')
     expect(restRes.result?.json.name).toBe('foo')
     expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
       httpm.MediaTypes.ApplicationJson
@@ -334,12 +335,12 @@ describe('basics', () => {
   it('puts a json object', async () => {
     const res = {name: 'foo'}
     const restRes = await _http.putJson<HttpBinData>(
-      'https://postman-echo.com/put',
+      'http://postman-echo.com/put',
       res
     )
     expect(restRes.statusCode).toBe(200)
     expect(restRes.result).toBeDefined()
-    expect(restRes.result?.url).toBe('https://postman-echo.com/put')
+    expect(restRes.result?.url).toBe('http://postman-echo.com/put')
     expect(restRes.result?.json.name).toBe('foo')
     expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
@@ -356,12 +357,12 @@ describe('basics', () => {
   it('patch a json object', async () => {
     const res = {name: 'foo'}
     const restRes = await _http.patchJson<HttpBinData>(
-      'https://postman-echo.com/patch',
+      'http://postman-echo.com/patch',
       res
     )
     expect(restRes.statusCode).toBe(200)
     expect(restRes.result).toBeDefined()
-    expect(restRes.result?.url).toBe('https://postman-echo.com/patch')
+    expect(restRes.result?.url).toBe('http://postman-echo.com/patch')
     expect(restRes.result?.json.name).toBe('foo')
     expect(restRes.result?.headers[httpm.Headers.Accept]).toBe(
       httpm.MediaTypes.ApplicationJson
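The disabled test above carries a `TODO write a mock rather then relying on a third party`. One way such a mock could look, sketched with the `nock` interceptor library — an assumption, since nock is not a dependency of this package; the endpoint and body simply mirror the commented-out test:

```typescript
import * as httpm from '@actions/http-client'
import nock from 'nock' // assumed dev dependency for this sketch

it('does basic https get request (mocked)', async () => {
  // Stub the echo endpoint locally instead of calling postman-echo.com.
  nock('https://postman-echo.com')
    .get('/get')
    .reply(200, {url: 'https://postman-echo.com/get'})

  const _http = new httpm.HttpClient('http-client-tests')
  const res = await _http.get('https://postman-echo.com/get')
  expect(res.message.statusCode).toBe(200)
  const obj = JSON.parse(await res.readBody())
  expect(obj.url).toBe('https://postman-echo.com/get')
})
```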


@@ -12,7 +12,7 @@ describe('headers', () => {
   it('preserves existing headers on getJson', async () => {
     const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
     let jsonObj = await _http.getJson<any>(
-      'https://postman-echo.com/get',
+      'http://postman-echo.com/get',
       additionalHeaders
     )
     expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('foo')
@@ -26,7 +26,7 @@ describe('headers', () => {
         [httpm.Headers.Accept]: 'baz'
       }
     }
-    jsonObj = await httpWithHeaders.getJson<any>('https://postman-echo.com/get')
+    jsonObj = await httpWithHeaders.getJson<any>('http://postman-echo.com/get')
     expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
     expect(jsonObj.headers[httpm.Headers.ContentType]).toContain(
       httpm.MediaTypes.ApplicationJson
@@ -36,7 +36,7 @@ describe('headers', () => {
   it('preserves existing headers on postJson', async () => {
     const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
     let jsonObj = await _http.postJson<any>(
-      'https://postman-echo.com/post',
+      'http://postman-echo.com/post',
       {},
       additionalHeaders
     )
@@ -52,7 +52,7 @@ describe('headers', () => {
       }
     }
     jsonObj = await httpWithHeaders.postJson<any>(
-      'https://postman-echo.com/post',
+      'http://postman-echo.com/post',
       {}
     )
     expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
@@ -64,7 +64,7 @@ describe('headers', () => {
   it('preserves existing headers on putJson', async () => {
     const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
     let jsonObj = await _http.putJson<any>(
-      'https://postman-echo.com/put',
+      'http://postman-echo.com/put',
       {},
       additionalHeaders
     )
@@ -80,7 +80,7 @@ describe('headers', () => {
       }
     }
     jsonObj = await httpWithHeaders.putJson<any>(
-      'https://postman-echo.com/put',
+      'http://postman-echo.com/put',
       {}
     )
     expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')
@@ -92,7 +92,7 @@ describe('headers', () => {
   it('preserves existing headers on patchJson', async () => {
     const additionalHeaders = {[httpm.Headers.Accept]: 'foo'}
     let jsonObj = await _http.patchJson<any>(
-      'https://postman-echo.com/patch',
+      'http://postman-echo.com/patch',
       {},
       additionalHeaders
     )
@@ -108,7 +108,7 @@ describe('headers', () => {
       }
     }
     jsonObj = await httpWithHeaders.patchJson<any>(
-      'https://postman-echo.com/patch',
+      'http://postman-echo.com/patch',
       {}
     )
     expect(jsonObj.result.headers[httpm.Headers.Accept]).toBe('baz')


@@ -222,30 +222,33 @@ describe('proxy', () => {
     expect(_proxyConnects).toHaveLength(0)
   })
 
+  // TODO mock this out so we don't rely on a third party
+  /*
   it('HttpClient does basic https get request through proxy', async () => {
     process.env['https_proxy'] = _proxyUrl
     const httpClient = new httpm.HttpClient()
     const res: httpm.HttpClientResponse = await httpClient.get(
-      'https://postman-echo.com/get'
+      'http://postman-echo.com/get'
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
     expect(_proxyConnects).toEqual(['postman-echo.com:443'])
   })
+  */
 
-  it('HttpClient does basic https get request when bypass proxy', async () => {
-    process.env['https_proxy'] = _proxyUrl
+  it('HttpClient does basic http get request when bypass proxy', async () => {
+    process.env['http_proxy'] = _proxyUrl
     process.env['no_proxy'] = 'postman-echo.com'
     const httpClient = new httpm.HttpClient()
     const res: httpm.HttpClientResponse = await httpClient.get(
-      'https://postman-echo.com/get'
+      'http://postman-echo.com/get'
     )
     expect(res.message.statusCode).toBe(200)
     const body: string = await res.readBody()
     const obj = JSON.parse(body)
-    expect(obj.url).toBe('https://postman-echo.com/get')
+    expect(obj.url).toBe('http://postman-echo.com/get')
     expect(_proxyConnects).toHaveLength(0)
   })
@@ -304,6 +307,18 @@ describe('proxy', () => {
     console.log(agent)
     expect(agent instanceof ProxyAgent).toBe(true)
   })
+
+  it('proxyAuth is set in tunnel agent when authentication is provided with URIencoding', async () => {
+    process.env['https_proxy'] =
+      'http://user%40github.com:p%40ssword@127.0.0.1:8080'
+    const httpClient = new httpm.HttpClient()
+    const agent: any = httpClient.getAgent('https://some-url')
+    // eslint-disable-next-line no-console
+    console.log(agent)
+    expect(agent.proxyOptions.host).toBe('127.0.0.1')
+    expect(agent.proxyOptions.port).toBe('8080')
+    expect(agent.proxyOptions.proxyAuth).toBe('user@github.com:p@ssword')
+  })
 })
 
 function _clearVars(): void {


@@ -1,6 +1,6 @@
 {
   "name": "@actions/http-client",
-  "version": "2.2.1",
+  "version": "2.2.3",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {


@@ -1,6 +1,6 @@
 {
   "name": "@actions/http-client",
-  "version": "2.2.1",
+  "version": "2.2.3",
   "description": "Actions Http Client",
   "keywords": [
     "github",


@@ -726,7 +726,9 @@ export class HttpClient {
         uri: proxyUrl.href,
         pipelining: !this._keepAlive ? 0 : 1,
         ...((proxyUrl.username || proxyUrl.password) && {
-          token: `${proxyUrl.username}:${proxyUrl.password}`
+          token: `Basic ${Buffer.from(
+            `${proxyUrl.username}:${proxyUrl.password}`
+          ).toString('base64')}`
         })
       })
       this._proxyAgentDispatcher = proxyAgent
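With this change the undici `ProxyAgent` receives a complete `Proxy-Authorization` value in RFC 7617 Basic form instead of the raw `user:pass` pair. A standalone illustration of the value now produced, using sample credentials (not taken from the diff):

```typescript
// Sample credentials for illustration; real values come from the proxy URL.
const username = 'user'
const password = 'p@ssword'

// Before: the token was the bare 'user:p@ssword' string.
// After: a proper Basic credential, base64-encoded per RFC 7617.
const token = `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`
console.log(token) // Basic dXNlcjpwQHNzd29yZA==
```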


@@ -15,10 +15,10 @@ export function getProxyUrl(reqUrl: URL): URL | undefined {
   if (proxyVar) {
     try {
-      return new URL(proxyVar)
+      return new DecodedURL(proxyVar)
     } catch {
       if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-        return new URL(`http://${proxyVar}`)
+        return new DecodedURL(`http://${proxyVar}`)
     }
   } else {
     return undefined
@@ -87,3 +87,22 @@ function isLoopbackAddress(host: string): boolean {
     hostLower.startsWith('[0:0:0:0:0:0:0:1]')
   )
 }
+
+class DecodedURL extends URL {
+  private _decodedUsername: string
+  private _decodedPassword: string
+
+  constructor(url: string | URL, base?: string | URL) {
+    super(url, base)
+    this._decodedUsername = decodeURIComponent(super.username)
+    this._decodedPassword = decodeURIComponent(super.password)
+  }
+
+  get username(): string {
+    return this._decodedUsername
+  }
+
+  get password(): string {
+    return this._decodedPassword
+  }
+}
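Why the subclass is needed: the WHATWG `URL` class keeps `username` and `password` percent-encoded, so credentials such as `user@github.com` read back as `user%40github.com`. A self-contained sketch of the difference — note the real class above decodes once in the constructor, while this compact version decodes lazily in the getters:

```typescript
// Standalone sketch mirroring the DecodedURL idea above.
class DecodedURL extends URL {
  get username(): string {
    return decodeURIComponent(super.username)
  }
  get password(): string {
    return decodeURIComponent(super.password)
  }
}

const raw = new URL('http://user%40github.com:p%40ssword@127.0.0.1:8080')
console.log(raw.username) // 'user%40github.com' – WHATWG URL keeps percent-encoding

const decoded = new DecodedURL('http://user%40github.com:p%40ssword@127.0.0.1:8080')
console.log(decoded.username) // 'user@github.com'
console.log(decoded.password) // 'p@ssword'
```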