
Merge branch 'main' into main

pull/1699/head
Danila Danko 2025-02-21 18:20:54 +03:00 committed by GitHub
commit daf810242f
115 changed files with 10640 additions and 5577 deletions

View File

@ -43,7 +43,7 @@ Note that before a PR will be accepted, you must ensure:
1. In a new branch, create a new Lerna package:
```console
$ npm run create-package new-package
$ npm run new-package [name]
```
This will ask you some questions about the new package. Start with `0.0.0` as the first version (see the other packages for examples of how the package.json is structured).

View File

@ -32,7 +32,7 @@ jobs:
run: npm run bootstrap
- name: audit tools (without allow-list)
run: npm audit --audit-level=moderate
run: npm audit --audit-level=moderate --omit dev
- name: audit packages
run: npm run audit-all

View File

@ -1,5 +1,7 @@
name: Publish NPM
run-name: Publish NPM - ${{ github.event.inputs.package }}
on:
workflow_dispatch:
inputs:
@ -9,7 +11,7 @@ on:
jobs:
test:
runs-on: macos-latest
runs-on: macos-latest-large
steps:
- name: setup repo
@ -46,7 +48,7 @@ jobs:
path: packages/${{ github.event.inputs.package }}/*.tgz
publish:
runs-on: macos-latest
runs-on: macos-latest-large
needs: test
environment: npm-publish
permissions:

View File

@ -16,7 +16,11 @@ jobs:
strategy:
matrix:
runs-on: [ubuntu-latest, macos-latest, windows-latest]
runs-on: [ubuntu-latest, macos-latest-large, windows-latest]
# Node 18 is the current default Node version in hosted runners, so users may still use the toolkit with it when running tests (see https://github.com/actions/toolkit/issues/1841)
# Node 20 is the currently supported Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions
node-version: [18.x, 20.x]
fail-fast: false
runs-on: ${{ matrix.runs-on }}
@ -25,10 +29,10 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Set Node.js 20.x
- name: Set up Node ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: 20.x
node-version: ${{ matrix.node-version }}
- name: npm install
run: npm install

package-lock.json (generated): 5396 lines changed

File diff suppressed because it is too large

View File

@ -13,7 +13,7 @@
"lint": "eslint packages/**/*.ts",
"lint-fix": "eslint packages/**/*.ts --fix",
"new-package": "scripts/create-package",
"test": "jest --testTimeout 60000"
"test": "jest --testTimeout 70000"
},
"devDependencies": {
"@types/jest": "^29.5.4",
@ -27,7 +27,7 @@
"eslint-plugin-prettier": "^5.0.0",
"flow-bin": "^0.115.0",
"jest": "^29.6.4",
"lerna": "^7.1.4",
"lerna": "^6.4.1",
"nx": "16.6.0",
"prettier": "^3.0.0",
"ts-jest": "^29.1.1",

View File

@ -1,5 +1,48 @@
# @actions/artifact Releases
### 2.2.2
- Default concurrency to 5 for uploading artifacts [#1962](https://github.com/actions/toolkit/pull/1962)
### 2.2.1
- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)
### 2.2.0
- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)
### 2.1.11
- Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)
- Use native `crypto` [#1815](https://github.com/actions/toolkit/pull/1815)
### 2.1.10
- Fixed a regression with symlinks not being automatically resolved [#1830](https://github.com/actions/toolkit/pull/1830)
- Fixed a regression with chunk timeout [#1786](https://github.com/actions/toolkit/pull/1786)
### 2.1.9
- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)
### 2.1.8
- Allows `*.localhost` domains for hostname checks for local development.
### 2.1.7
- Updated unzip-stream dependency and reverted to using `unzip.Extract()`
### 2.1.6
- Will retry on invalid request responses.
### 2.1.5
- Bumped `archiver` dependency to 7.0.1
### 2.1.4
- Adds info-level logging for zip extraction
@ -11,9 +54,9 @@
### 2.1.2
- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts
### 2.1.1
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
### 2.1.0

View File

@ -116,6 +116,54 @@ describe('artifact-http-client', () => {
expect(mockPost).toHaveBeenCalledTimes(2)
})
it('should retry if invalid body response', async () => {
const mockPost = jest
.fn(() => {
const msgSucceeded = new http.IncomingMessage(new net.Socket())
msgSucceeded.statusCode = 200
return {
message: msgSucceeded,
readBody: async () => {
return Promise.resolve(
`{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
)
}
}
})
.mockImplementationOnce(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 502
msgFailed.statusMessage = 'Bad Gateway'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve('💥')
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient(clientOptions)
const artifact = await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(artifact).toBeDefined()
expect(artifact.ok).toBe(true)
expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
expect(mockPost).toHaveBeenCalledTimes(2)
})
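The two retry tests above pin down the client behavior: a 5xx response or an unparseable JSON body is retried, and the client gives up after five attempts. A minimal sketch of such a loop, with illustrative names and no backoff (the package's actual internals may differ):

```typescript
// Illustrative retry loop, not the package's actual implementation:
// retry on a 5xx status or an unparseable JSON body, up to maxAttempts.
async function postWithRetry(
  post: () => Promise<{statusCode?: number; body: string}>,
  maxAttempts = 5
): Promise<object> {
  let lastError: Error = new Error('no attempts made')
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      const res = await post()
      if ((res.statusCode ?? 500) >= 500) {
        throw new Error(`retryable status code: ${res.statusCode}`)
      }
      return JSON.parse(res.body) // an invalid body throws and triggers a retry
    } catch (err) {
      lastError = err as Error
    }
  }
  throw lastError
}
```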
it('should fail if the request fails 5 times', async () => {
const mockPost = jest.fn(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())

View File

@ -1,4 +1,10 @@
import * as config from '../src/internal/shared/config'
import os from 'os'
// Mock the 'os' module
jest.mock('os', () => ({
cpus: jest.fn()
}))
beforeEach(() => {
jest.resetModules()
@ -20,8 +26,78 @@ describe('isGhes', () => {
expect(config.isGhes()).toBe(false)
})
it('should return false when the request domain ends with .localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://github.localhost'
expect(config.isGhes()).toBe(false)
})
it('should return true when the request domain is specific to an enterprise', () => {
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
expect(config.isGhes()).toBe(true)
})
})
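For reference, these assertions match an `isGhes()` of roughly this shape (a sketch assuming the hostname rules implied by the tests and the 2.1.1/2.1.8 release notes, not copied from the implementation):

```typescript
// Sketch: treat anything that is not github.com, *.ghe.com, or
// *.localhost as GitHub Enterprise Server.
export function isGhes(): boolean {
  const ghUrl = new URL(process.env.GITHUB_SERVER_URL || 'https://github.com')
  const hostname = ghUrl.hostname.toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')
  return !isGitHubHost && !isGheHost && !isLocalHost
}
```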
describe('uploadChunkTimeoutEnv', () => {
it('should return default 300000 when no env set', () => {
expect(config.getUploadChunkTimeout()).toBe(300000)
})
it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
expect(config.getUploadChunkTimeout()).toBe(150000)
})
it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
expect(() => {
config.getUploadChunkTimeout()
}).toThrow()
})
})
describe('uploadConcurrencyEnv', () => {
it('Concurrency default to 5', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
expect(config.getConcurrency()).toBe(5)
})
it('Concurrency max out at 300 on systems with many CPUs', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '301'
expect(config.getConcurrency()).toBe(300)
})
it('Concurrency can be set to 32 when cpu num is <= 4', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '32'
expect(config.getConcurrency()).toBe(32)
})
it('Concurrency can be set 16 * num of cpu when cpu num is > 4', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '96'
expect(config.getConcurrency()).toBe(96)
})
it('Concurrency can be overridden by env var ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
expect(config.getConcurrency()).toBe(10)
})
it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
expect(() => {
config.getConcurrency()
}).toThrow()
})
it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
expect(() => {
config.getConcurrency()
}).toThrow()
})
})
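Taken together, the assertions above describe the config behavior: a default concurrency of 5, an env override capped per host size and never above 300, and a 300000 ms default chunk timeout. A sketch consistent with the tests (assumed from the assertions, not copied from src/internal/shared/config.ts):

```typescript
import os from 'os'

// Default to 5; ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY may raise it, capped
// at 32 for hosts with <= 4 CPUs, 16 * CPUs otherwise, never above 300.
export function getConcurrency(): number {
  const envVar = process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY
  if (!envVar) return 5

  const requested = parseInt(envVar)
  if (isNaN(requested) || requested < 1) {
    throw new Error(
      'Invalid ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY: must be a positive integer'
    )
  }

  const numCPUs = os.cpus().length
  const maxForHost = numCPUs > 4 ? 16 * numCPUs : 32
  return Math.min(requested, maxForHost, 300)
}

// Default 300000 ms; ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS overrides it.
export function getUploadChunkTimeout(): number {
  const envVar = process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS
  if (!envVar) return 300000

  const timeout = parseInt(envVar)
  if (isNaN(timeout)) {
    throw new Error('Invalid ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS: must be a number')
  }
  return timeout
}
```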

View File

@ -200,14 +200,12 @@ describe('download-artifact', () => {
}
)
await expect(
downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)
const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
@ -223,6 +221,16 @@ describe('download-artifact', () => {
expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
// ensure path traversal was not possible
expect(
fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
).toBe(true)
expect(
fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
).toBe(true)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
})
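The `x/etc/hosts` and `y/etc/hosts` checks above verify that malicious entry names end up inside the download directory rather than escaping it. A hedged illustration of the containment property being asserted (the helper name is hypothetical):

```typescript
import * as path from 'path'

// Hypothetical helper: an extracted entry is safe only if its resolved
// path stays inside the download directory.
function isContained(downloadDir: string, entryName: string): boolean {
  const resolved = path.resolve(downloadDir, entryName)
  return resolved.startsWith(path.resolve(downloadDir) + path.sep)
}

// e.g. isContained('/work', '../etc/hosts') === false,
//      isContained('/work', 'x/etc/hosts') === true
```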
it('should successfully download an artifact to user defined path', async () => {

View File

@ -1,257 +1,173 @@
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'
import * as retention from '../src/internal/upload/retention'
import * as config from '../src/internal/shared/config'
import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'
import {FilesNotFoundError} from '../src/internal/shared/errors'
import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import * as fs from 'fs'
import * as path from 'path'
import unzip from 'unzip-stream'
const uploadStreamMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
uploadStream: uploadStreamMock
}))
jest.mock('@azure/storage-blob', () => ({
BlobClient: jest.fn().mockImplementation(() => {
return {
getBlockBlobClient: blockBlobClientMock
}
})
}))
const fixtures = {
uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
files: [
{name: 'file1.txt', content: 'test 1 file content'},
{name: 'file2.txt', content: 'test 2 file content'},
{name: 'file3.txt', content: 'test 3 file content'},
{
name: 'real.txt',
content: 'from a symlink'
},
{
name: 'relative.txt',
content: 'from a symlink',
symlink: 'real.txt',
relative: true
},
{
name: 'absolute.txt',
content: 'from a symlink',
symlink: 'real.txt',
relative: false
}
],
backendIDs: {
workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
},
runtimeToken: 'test-token',
resultsServiceURL: 'http://results.local',
inputs: {
artifactName: 'test-artifact',
files: [
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
rootDirectory: '/home/user/files/plz-upload'
}
}
describe('upload-artifact', () => {
beforeAll(() => {
fs.mkdirSync(fixtures.uploadDirectory, {
recursive: true
})
for (const file of fixtures.files) {
if (file.symlink) {
let symlinkPath = file.symlink
if (!file.relative) {
symlinkPath = path.join(fixtures.uploadDirectory, file.symlink)
}
if (!fs.existsSync(path.join(fixtures.uploadDirectory, file.name))) {
fs.symlinkSync(
symlinkPath,
path.join(fixtures.uploadDirectory, file.name),
'file'
)
}
} else {
fs.writeFileSync(
path.join(fixtures.uploadDirectory, file.name),
file.content
)
}
}
})
beforeEach(() => {
noopLogs()
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIDs)
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue(
fixtures.files.map(file => ({
sourcePath: path.join(fixtures.uploadDirectory, file.name),
destinationPath: file.name,
stats: new fs.Stats()
}))
)
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue(fixtures.resultsServiceURL)
})
afterEach(() => {
jest.restoreAllMocks()
})
it('should successfully upload an artifact', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.com'
})
)
jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
Promise.resolve({
uploadSize: 1234,
sha256Hash: 'test-sha256-hash'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
)
expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
})
it('should throw an error if the root directory is invalid', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockImplementation(() => {
throw new Error('Invalid root directory')
})
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
)
expect(uploadResp).rejects.toThrow('Invalid root directory')
})
it('should reject if there are no files to upload', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
it('should reject if there are no files to upload', async () => {
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockClear()
.mockReturnValue([])
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
})
it('should reject if no backend IDs are found', () => {
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
it('should reject if no backend IDs are found', async () => {
jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should reject if the creation request fails', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
it('should reject if the creation request fails', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should reject if blob storage upload is unsuccessful', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
it('should reject if blob storage upload is unsuccessful', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
@ -264,57 +180,19 @@ describe('upload-artifact', () => {
.spyOn(blobUpload, 'uploadZipToBlobStorage')
.mockReturnValue(Promise.reject(new Error('boom')))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should reject if finalize artifact fails', () => {
const mockDate = new Date('2020-01-01')
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue([
{
sourcePath: '/home/user/files/plz-upload/file1.txt',
destinationPath: 'file1.txt'
},
{
sourcePath: '/home/user/files/plz-upload/file2.txt',
destinationPath: 'file2.txt'
},
{
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
destinationPath: 'dir/file3.txt'
}
])
it('should reject if finalize artifact fails', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678'
})
jest
.spyOn(retention, 'getExpiration')
.mockReturnValue(Timestamp.fromDate(mockDate))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
@ -333,22 +211,163 @@ describe('upload-artifact', () => {
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))
// ArtifactHttpClient mocks
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://test-url.com')
const uploadResp = uploadArtifact(
'test-artifact',
[
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
'/home/user/files/plz-upload'
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
expect(uploadResp).rejects.toThrow()
await expect(uploadResp).rejects.toThrow()
})
it('should successfully upload an artifact', async () => {
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockRestore()
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.local'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
artifactId: '1'
})
)
let loadedBytes = 0
const uploadedZip = path.join(
fixtures.uploadDirectory,
'..',
'uploaded.zip'
)
uploadStreamMock.mockImplementation(
async (
stream: NodeJS.ReadableStream,
bufferSize?: number,
maxConcurrency?: number,
options?: BlockBlobUploadStreamOptions
) => {
const {onProgress} = options || {}
if (fs.existsSync(uploadedZip)) {
fs.unlinkSync(uploadedZip)
}
const uploadedZipStream = fs.createWriteStream(uploadedZip)
onProgress?.({loadedBytes: 0})
return new Promise((resolve, reject) => {
stream.on('data', chunk => {
loadedBytes += chunk.length
uploadedZipStream.write(chunk)
onProgress?.({loadedBytes})
})
stream.on('end', () => {
onProgress?.({loadedBytes})
uploadedZipStream.end()
resolve({})
})
stream.on('error', err => {
reject(err)
})
})
}
)
const {id, size, digest} = await uploadArtifact(
fixtures.inputs.artifactName,
fixtures.files.map(file =>
path.join(fixtures.uploadDirectory, file.name)
),
fixtures.uploadDirectory
)
expect(id).toBe(1)
expect(size).toBe(loadedBytes)
expect(digest).toBeDefined()
expect(digest).toHaveLength(64)
const extractedDirectory = path.join(
fixtures.uploadDirectory,
'..',
'extracted'
)
if (fs.existsSync(extractedDirectory)) {
fs.rmdirSync(extractedDirectory, {recursive: true})
}
const extract = new Promise((resolve, reject) => {
fs.createReadStream(uploadedZip)
.pipe(unzip.Extract({path: extractedDirectory}))
.on('close', () => {
resolve(true)
})
.on('error', err => {
reject(err)
})
})
await expect(extract).resolves.toBe(true)
for (const file of fixtures.files) {
const filePath = path.join(extractedDirectory, file.name)
expect(fs.existsSync(filePath)).toBe(true)
expect(fs.readFileSync(filePath, 'utf8')).toBe(file.content)
}
})
it('should throw an error when blob chunk uploads get delayed', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.local'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
artifactId: '1'
})
)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)
uploadStreamMock.mockImplementation(
async (
stream: NodeJS.ReadableStream,
bufferSize?: number,
maxConcurrency?: number,
options?: BlockBlobUploadStreamOptions
) => {
const {onProgress, abortSignal} = options || {}
onProgress?.({loadedBytes: 0})
return new Promise(resolve => {
abortSignal?.addEventListener('abort', () => {
resolve({})
})
})
}
)
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
})
})
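The final test drives the 'Upload progress stalled.' path by never reporting progress, so the pattern under test is an abort-on-stall watchdog. A minimal sketch of that pattern (illustrative names, not the package's internals):

```typescript
// Illustrative watchdog: abort the blob upload if no progress callback
// fires within the chunk timeout (see getUploadChunkTimeout above).
function createStallWatchdog(timeoutMs: number): {
  signal: AbortSignal
  onProgress: () => void
  stop: () => void
} {
  const controller = new AbortController()
  let timer = setTimeout(() => controller.abort(), timeoutMs)
  const reset = (): void => {
    // Any progress resets the stall timer.
    clearTimeout(timer)
    timer = setTimeout(() => controller.abort(), timeoutMs)
  }
  return {
    signal: controller.signal,
    onProgress: reset,
    stop: () => clearTimeout(timer)
  }
}
```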

View File

@ -305,4 +305,22 @@ describe('Search', () => {
}
}
})
it('Upload Specification - Includes symlinks', async () => {
const targetPath = path.join(root, 'link-dir', 'symlink-me.txt')
await fs.mkdir(path.dirname(targetPath), {recursive: true})
await fs.writeFile(targetPath, 'symlink file content')
const uploadPath = path.join(root, 'upload-dir', 'symlink.txt')
await fs.mkdir(path.dirname(uploadPath), {recursive: true})
await fs.symlink(targetPath, uploadPath, 'file')
const specifications = getUploadZipSpecification([uploadPath], root)
expect(specifications.length).toEqual(1)
expect(specifications[0].sourcePath).toEqual(uploadPath)
expect(specifications[0].destinationPath).toEqual(
path.join('/upload-dir', 'symlink.txt')
)
expect(specifications[0].stats.isSymbolicLink()).toBe(true)
})
})
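The `stats.isSymbolicLink()` assertion implies the specification records each file with `lstat` rather than `stat`, so the link itself (not its target) is described. A small self-contained illustration:

```typescript
import * as fs from 'fs'
import * as os from 'os'
import * as path from 'path'

// lstat reports on the link itself; stat follows it to the target.
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'symlink-demo-'))
const target = path.join(dir, 'real.txt')
const link = path.join(dir, 'symlink.txt')
fs.writeFileSync(target, 'content')
fs.symlinkSync(target, link, 'file')
console.log(fs.lstatSync(link).isSymbolicLink()) // true
console.log(fs.statSync(link).isSymbolicLink()) // false
```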

View File

@ -40,4 +40,4 @@
#### Defined in
[src/artifact.ts:7](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/artifact.ts#L7)
[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7)

View File

@ -48,7 +48,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L24)
[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24)
## Properties

View File

@ -61,7 +61,7 @@ single DeleteArtifactResponse object
#### Defined in
[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L248)
[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248)
___
@ -92,7 +92,7 @@ single DownloadArtifactResponse object
#### Defined in
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L138)
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138)
___
@ -127,7 +127,7 @@ If there are multiple artifacts with the same name in the same workflow run this
#### Defined in
[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L212)
[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212)
___
@ -159,7 +159,7 @@ ListArtifactResponse object
#### Defined in
[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L176)
[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176)
___
@ -190,4 +190,4 @@ single UploadArtifactResponse object
#### Defined in
[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L113)
[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113)

View File

@ -49,7 +49,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L4)
[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4)
## Properties
@ -59,7 +59,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L2)
[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2)
___

View File

@ -48,7 +48,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L31)
[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31)
## Properties

View File

@ -48,7 +48,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L17)
[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17)
## Properties

View File

@ -50,7 +50,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L42)
[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42)
## Properties
@ -60,7 +60,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L40)
[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40)
___
@ -198,4 +198,4 @@ ___
#### Defined in
[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L49)
[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49)

View File

@ -43,7 +43,7 @@ Error.constructor
#### Defined in
[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L62)
[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62)
## Properties
@ -181,4 +181,4 @@ ___
#### Defined in
[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L68)
[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68)

View File

@ -23,7 +23,7 @@ The time when the artifact was created
#### Defined in
[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L123)
[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128)
___
@ -35,7 +35,7 @@ The ID of the artifact
#### Defined in
[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L113)
[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118)
___
@ -47,7 +47,7 @@ The name of the artifact
#### Defined in
[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L108)
[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113)
___
@ -59,4 +59,4 @@ The size of the artifact in bytes
#### Defined in
[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L118)
[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123)

View File

@ -43,7 +43,7 @@ single DeleteArtifactResponse object
#### Defined in
[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L103)
[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103)
___
@ -70,7 +70,7 @@ single DownloadArtifactResponse object
#### Defined in
[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L89)
[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89)
___
@ -101,7 +101,7 @@ If there are multiple artifacts with the same name in the same workflow run this
#### Defined in
[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L75)
[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75)
___
@ -129,7 +129,7 @@ ListArtifactResponse object
#### Defined in
[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L57)
[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57)
___
@ -156,4 +156,4 @@ single UploadArtifactResponse object
#### Defined in
[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L40)
[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40)

View File

@ -20,4 +20,4 @@ The id of the artifact that was deleted
#### Defined in
[src/internal/shared/interfaces.ts:158](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L158)
[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163)

View File

@ -20,4 +20,4 @@ Denotes where the artifact will be downloaded to. If not specified then the arti
#### Defined in
[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L98)
[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103)

View File

@ -20,4 +20,4 @@ The path where the artifact was downloaded to
#### Defined in
[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L88)
[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93)

View File

@ -27,4 +27,4 @@ The criteria for finding Artifact(s) out of the scope of the current run.
#### Defined in
[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L131)
[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136)

View File

@ -20,4 +20,4 @@ Metadata about the artifact that was found
#### Defined in
[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L57)
[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62)

View File

@ -21,4 +21,4 @@ In the case of reruns, this can be useful to avoid duplicates
#### Defined in
[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L68)
[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73)

View File

@ -20,4 +20,4 @@ A list of artifacts that were found
#### Defined in
[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L78)
[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83)

View File

@ -28,7 +28,7 @@ For large files that are not easily compressed, a value of 0 is recommended for
#### Defined in
[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L47)
[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52)
___
@ -52,4 +52,4 @@ input of 0 assumes default retention setting.
#### Defined in
[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L36)
[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41)

View File

@ -8,11 +8,24 @@ Response from the server when an artifact is uploaded
### Properties
- [digest](UploadArtifactResponse.md#digest)
- [id](UploadArtifactResponse.md#id)
- [size](UploadArtifactResponse.md#size)
## Properties
### digest
`Optional` **digest**: `string`
The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
#### Defined in
[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19)
___
### id
`Optional` **id**: `number`
@ -22,7 +35,7 @@ This ID can be used as input to other APIs to download, delete or get more infor
#### Defined in
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L14)
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14)
___
@ -34,4 +47,4 @@ Total size of the artifact in bytes. Not provided if no artifact was uploaded
#### Defined in
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L8)
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8)
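As context for the new `digest` property documented above: a SHA-256 hex digest is 64 characters long, which is what the upload test asserts. A tiny illustration with placeholder bytes:

```typescript
import {createHash} from 'crypto'

// Placeholder content; in the library the digest is computed over the
// uploaded zip stream.
const zipBytes = Buffer.from('example artifact bytes')
const digest = createHash('sha256').update(zipBytes).digest('hex')
console.log(digest.length) // 64
```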

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@actions/artifact",
"version": "2.1.4",
"version": "2.2.2",
"preview": true,
"description": "Actions artifact lib",
"keywords": [
@ -49,10 +49,8 @@
"@octokit/plugin-retry": "^3.0.9",
"@octokit/request-error": "^5.0.0",
"@protobuf-ts/plugin": "^2.2.3-alpha.1",
"archiver": "^5.3.1",
"crypto": "^1.0.1",
"archiver": "^7.0.1",
"jwt-decode": "^3.1.2",
"twirp-ts": "^2.5.0",
"unzip-stream": "^0.3.1"
},
"devDependencies": {

View File

@ -1,4 +1,4 @@
export * from './google/protobuf/timestamp'
export * from './google/protobuf/wrappers'
export * from './results/api/v1/artifact'
export * from './results/api/v1/artifact.twirp'
export * from './results/api/v1/artifact.twirp-client'

View File

@ -0,0 +1,232 @@
import {
CreateArtifactRequest,
CreateArtifactResponse,
FinalizeArtifactRequest,
FinalizeArtifactResponse,
ListArtifactsRequest,
ListArtifactsResponse,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse,
DeleteArtifactRequest,
DeleteArtifactResponse,
} from "./artifact";
//==================================//
// Client Code //
//==================================//
interface Rpc {
request(
service: string,
method: string,
contentType: "application/json" | "application/protobuf",
data: object | Uint8Array
): Promise<object | Uint8Array>;
}
export interface ArtifactServiceClient {
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse>;
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse>;
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse>;
}
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateArtifact.bind(this);
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse> {
const data = CreateArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"CreateArtifact",
"application/json",
data as object
);
return promise.then((data) =>
CreateArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse> {
const data = FinalizeArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"FinalizeArtifact",
"application/json",
data as object
);
return promise.then((data) =>
FinalizeArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
const data = ListArtifactsRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"ListArtifacts",
"application/json",
data as object
);
return promise.then((data) =>
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
);
}
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse> {
const data = GetSignedArtifactURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"GetSignedArtifactURL",
"application/json",
data as object
);
return promise.then((data) =>
GetSignedArtifactURLResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse> {
const data = DeleteArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"DeleteArtifact",
"application/json",
data as object
);
return promise.then((data) =>
DeleteArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
}
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateArtifact.bind(this);
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse> {
const data = CreateArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"CreateArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
CreateArtifactResponse.fromBinary(data as Uint8Array)
);
}
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse> {
const data = FinalizeArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"FinalizeArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
);
}
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
const data = ListArtifactsRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"ListArtifacts",
"application/protobuf",
data
);
return promise.then((data) =>
ListArtifactsResponse.fromBinary(data as Uint8Array)
);
}
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse> {
const data = GetSignedArtifactURLRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"GetSignedArtifactURL",
"application/protobuf",
data
);
return promise.then((data) =>
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
);
}
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse> {
const data = DeleteArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"DeleteArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
DeleteArtifactResponse.fromBinary(data as Uint8Array)
);
}
}
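For context on how this generated client is consumed: the caller supplies the `Rpc` transport. A minimal hypothetical wiring (the URL, token, and fetch-based transport below are placeholders, not part of this commit):

```typescript
import {ArtifactServiceClientJSON} from './artifact.twirp-client'

// Hypothetical JSON-only transport; a real one would also handle
// protobuf content types and map Twirp error responses.
const rpc = {
  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const response = await fetch(
      `https://results.local/twirp/${service}/${method}`, // placeholder URL
      {
        method: 'POST',
        headers: {'Content-Type': contentType, Authorization: 'Bearer <token>'},
        body: JSON.stringify(data)
      }
    )
    return (await response.json()) as object
  }
}

const client = new ArtifactServiceClientJSON(rpc)
```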

View File

@ -1,976 +0,0 @@
import {
TwirpContext,
TwirpServer,
RouterEvents,
TwirpError,
TwirpErrorCode,
Interceptor,
TwirpContentType,
chainInterceptors,
} from "twirp-ts";
import {
CreateArtifactRequest,
CreateArtifactResponse,
FinalizeArtifactRequest,
FinalizeArtifactResponse,
ListArtifactsRequest,
ListArtifactsResponse,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse,
DeleteArtifactRequest,
DeleteArtifactResponse,
} from "./artifact";
//==================================//
// Client Code //
//==================================//
interface Rpc {
request(
service: string,
method: string,
contentType: "application/json" | "application/protobuf",
data: object | Uint8Array
): Promise<object | Uint8Array>;
}
export interface ArtifactServiceClient {
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse>;
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse>;
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse>;
}
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateArtifact.bind(this);
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse> {
const data = CreateArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"CreateArtifact",
"application/json",
data as object
);
return promise.then((data) =>
CreateArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse> {
const data = FinalizeArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"FinalizeArtifact",
"application/json",
data as object
);
return promise.then((data) =>
FinalizeArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
const data = ListArtifactsRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"ListArtifacts",
"application/json",
data as object
);
return promise.then((data) =>
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
);
}
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse> {
const data = GetSignedArtifactURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"GetSignedArtifactURL",
"application/json",
data as object
);
return promise.then((data) =>
GetSignedArtifactURLResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse> {
const data = DeleteArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"DeleteArtifact",
"application/json",
data as object
);
return promise.then((data) =>
DeleteArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
}
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateArtifact.bind(this);
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse> {
const data = CreateArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"CreateArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
CreateArtifactResponse.fromBinary(data as Uint8Array)
);
}
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse> {
const data = FinalizeArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"FinalizeArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
);
}
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
const data = ListArtifactsRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"ListArtifacts",
"application/protobuf",
data
);
return promise.then((data) =>
ListArtifactsResponse.fromBinary(data as Uint8Array)
);
}
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse> {
const data = GetSignedArtifactURLRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"GetSignedArtifactURL",
"application/protobuf",
data
);
return promise.then((data) =>
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
);
}
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse> {
const data = DeleteArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"DeleteArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
DeleteArtifactResponse.fromBinary(data as Uint8Array)
);
}
}
//==================================//
// Server Code //
//==================================//
export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
CreateArtifact(
ctx: T,
request: CreateArtifactRequest
): Promise<CreateArtifactResponse>;
FinalizeArtifact(
ctx: T,
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse>;
ListArtifacts(
ctx: T,
request: ListArtifactsRequest
): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(
ctx: T,
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(
ctx: T,
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse>;
}
export enum ArtifactServiceMethod {
CreateArtifact = "CreateArtifact",
FinalizeArtifact = "FinalizeArtifact",
ListArtifacts = "ListArtifacts",
GetSignedArtifactURL = "GetSignedArtifactURL",
DeleteArtifact = "DeleteArtifact",
}
export const ArtifactServiceMethodList = [
ArtifactServiceMethod.CreateArtifact,
ArtifactServiceMethod.FinalizeArtifact,
ArtifactServiceMethod.ListArtifacts,
ArtifactServiceMethod.GetSignedArtifactURL,
ArtifactServiceMethod.DeleteArtifact,
];
export function createArtifactServiceServer<
T extends TwirpContext = TwirpContext
>(service: ArtifactServiceTwirp<T>) {
return new TwirpServer<ArtifactServiceTwirp, T>({
service,
packageName: "github.actions.results.api.v1",
serviceName: "ArtifactService",
methodList: ArtifactServiceMethodList,
matchRoute: matchArtifactServiceRoute,
});
}
function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
method: string,
events: RouterEvents<T>
) {
switch (method) {
case "CreateArtifact":
return async (
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
CreateArtifactRequest,
CreateArtifactResponse
>[]
) => {
ctx = { ...ctx, methodName: "CreateArtifact" };
await events.onMatch(ctx);
return handleArtifactServiceCreateArtifactRequest(
ctx,
service,
data,
interceptors
);
};
case "FinalizeArtifact":
return async (
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeArtifactRequest,
FinalizeArtifactResponse
>[]
) => {
ctx = { ...ctx, methodName: "FinalizeArtifact" };
await events.onMatch(ctx);
return handleArtifactServiceFinalizeArtifactRequest(
ctx,
service,
data,
interceptors
);
};
case "ListArtifacts":
return async (
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
ListArtifactsRequest,
ListArtifactsResponse
>[]
) => {
ctx = { ...ctx, methodName: "ListArtifacts" };
await events.onMatch(ctx);
return handleArtifactServiceListArtifactsRequest(
ctx,
service,
data,
interceptors
);
};
case "GetSignedArtifactURL":
return async (
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse
>[]
) => {
ctx = { ...ctx, methodName: "GetSignedArtifactURL" };
await events.onMatch(ctx);
return handleArtifactServiceGetSignedArtifactURLRequest(
ctx,
service,
data,
interceptors
);
};
case "DeleteArtifact":
return async (
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
DeleteArtifactRequest,
DeleteArtifactResponse
>[]
) => {
ctx = { ...ctx, methodName: "DeleteArtifact" };
await events.onMatch(ctx);
return handleArtifactServiceDeleteArtifactRequest(
ctx,
service,
data,
interceptors
);
};
default:
events.onNotFound();
const msg = `no handler found`;
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceCreateArtifactRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleArtifactServiceCreateArtifactJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleArtifactServiceCreateArtifactProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceFinalizeArtifactRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeArtifactRequest,
FinalizeArtifactResponse
>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleArtifactServiceFinalizeArtifactJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleArtifactServiceFinalizeArtifactProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceListArtifactsRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleArtifactServiceListArtifactsJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleArtifactServiceListArtifactsProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceGetSignedArtifactURLRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse
>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleArtifactServiceGetSignedArtifactURLJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleArtifactServiceGetSignedArtifactURLProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceDeleteArtifactRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleArtifactServiceDeleteArtifactJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleArtifactServiceDeleteArtifactProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
async function handleArtifactServiceCreateArtifactJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
let request: CreateArtifactRequest;
let response: CreateArtifactResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = CreateArtifactRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
CreateArtifactRequest,
CreateArtifactResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.CreateArtifact(ctx, inputReq);
});
} else {
response = await service.CreateArtifact(ctx, request!);
}
return JSON.stringify(
CreateArtifactResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleArtifactServiceFinalizeArtifactJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeArtifactRequest,
FinalizeArtifactResponse
>[]
) {
let request: FinalizeArtifactRequest;
let response: FinalizeArtifactResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = FinalizeArtifactRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
FinalizeArtifactRequest,
FinalizeArtifactResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.FinalizeArtifact(ctx, inputReq);
});
} else {
response = await service.FinalizeArtifact(ctx, request!);
}
return JSON.stringify(
FinalizeArtifactResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleArtifactServiceListArtifactsJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
) {
let request: ListArtifactsRequest;
let response: ListArtifactsResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = ListArtifactsRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
ListArtifactsRequest,
ListArtifactsResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.ListArtifacts(ctx, inputReq);
});
} else {
response = await service.ListArtifacts(ctx, request!);
}
return JSON.stringify(
ListArtifactsResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleArtifactServiceGetSignedArtifactURLJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse
>[]
) {
let request: GetSignedArtifactURLRequest;
let response: GetSignedArtifactURLResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = GetSignedArtifactURLRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.GetSignedArtifactURL(ctx, inputReq);
});
} else {
response = await service.GetSignedArtifactURL(ctx, request!);
}
return JSON.stringify(
GetSignedArtifactURLResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleArtifactServiceDeleteArtifactJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
) {
let request: DeleteArtifactRequest;
let response: DeleteArtifactResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = DeleteArtifactRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
DeleteArtifactRequest,
DeleteArtifactResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.DeleteArtifact(ctx, inputReq);
});
} else {
response = await service.DeleteArtifact(ctx, request!);
}
return JSON.stringify(
DeleteArtifactResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleArtifactServiceCreateArtifactProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
let request: CreateArtifactRequest;
let response: CreateArtifactResponse;
try {
request = CreateArtifactRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
CreateArtifactRequest,
CreateArtifactResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.CreateArtifact(ctx, inputReq);
});
} else {
response = await service.CreateArtifact(ctx, request!);
}
return Buffer.from(CreateArtifactResponse.toBinary(response));
}
async function handleArtifactServiceFinalizeArtifactProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeArtifactRequest,
FinalizeArtifactResponse
>[]
) {
let request: FinalizeArtifactRequest;
let response: FinalizeArtifactResponse;
try {
request = FinalizeArtifactRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
FinalizeArtifactRequest,
FinalizeArtifactResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.FinalizeArtifact(ctx, inputReq);
});
} else {
response = await service.FinalizeArtifact(ctx, request!);
}
return Buffer.from(FinalizeArtifactResponse.toBinary(response));
}
async function handleArtifactServiceListArtifactsProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
) {
let request: ListArtifactsRequest;
let response: ListArtifactsResponse;
try {
request = ListArtifactsRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
ListArtifactsRequest,
ListArtifactsResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.ListArtifacts(ctx, inputReq);
});
} else {
response = await service.ListArtifacts(ctx, request!);
}
return Buffer.from(ListArtifactsResponse.toBinary(response));
}
async function handleArtifactServiceGetSignedArtifactURLProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse
>[]
) {
let request: GetSignedArtifactURLRequest;
let response: GetSignedArtifactURLResponse;
try {
request = GetSignedArtifactURLRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.GetSignedArtifactURL(ctx, inputReq);
});
} else {
response = await service.GetSignedArtifactURL(ctx, request!);
}
return Buffer.from(GetSignedArtifactURLResponse.toBinary(response));
}
async function handleArtifactServiceDeleteArtifactProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: ArtifactServiceTwirp,
data: Buffer,
interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
) {
let request: DeleteArtifactRequest;
let response: DeleteArtifactResponse;
try {
request = DeleteArtifactRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
DeleteArtifactRequest,
DeleteArtifactResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.DeleteArtifact(ctx, inputReq);
});
} else {
response = await service.DeleteArtifact(ctx, request!);
}
return Buffer.from(DeleteArtifactResponse.toBinary(response));
}

View File

@ -1,7 +1,4 @@
import fs from 'fs/promises'
import * as stream from 'stream'
import {createWriteStream} from 'fs'
import * as path from 'path'
import * as github from '@actions/github'
import * as core from '@actions/core'
import * as httpClient from '@actions/http-client'
@ -47,11 +44,6 @@ async function streamExtract(url: string, directory: string): Promise<void> {
await streamExtractExternal(url, directory)
return
} catch (error) {
if (error.message.includes('Malformed extraction path')) {
throw new Error(
`Artifact download failed with unretryable error: ${error.message}`
)
}
retryCount++
core.debug(
`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`
@ -86,8 +78,6 @@ export async function streamExtractExternal(
}
const timer = setTimeout(timerFn, timeout)
const createdDirectories = new Set<string>()
createdDirectories.add(directory)
response.message
.on('data', () => {
timer.refresh()
@ -99,46 +89,8 @@ export async function streamExtractExternal(
clearTimeout(timer)
reject(error)
})
.pipe(unzip.Parse())
.pipe(
new stream.Transform({
objectMode: true,
transform: async (entry, _, callback) => {
const fullPath = path.normalize(path.join(directory, entry.path))
if (!directory.endsWith(path.sep)) {
directory += path.sep
}
if (!fullPath.startsWith(directory)) {
reject(new Error(`Malformed extraction path: ${fullPath}`))
}
if (entry.type === 'Directory') {
if (!createdDirectories.has(fullPath)) {
createdDirectories.add(fullPath)
await resolveOrCreateDirectory(fullPath).then(() => {
entry.autodrain()
callback()
})
} else {
entry.autodrain()
callback()
}
} else {
core.info(`Extracting artifact entry: ${fullPath}`)
if (!createdDirectories.has(path.dirname(fullPath))) {
createdDirectories.add(path.dirname(fullPath))
await resolveOrCreateDirectory(path.dirname(fullPath))
}
const writeStream = createWriteStream(fullPath)
writeStream.on('finish', callback)
writeStream.on('error', reject)
entry.pipe(writeStream)
}
}
})
)
.on('finish', async () => {
.pipe(unzip.Extract({path: directory}))
.on('close', () => {
clearTimeout(timer)
resolve()
})
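
The change above drops the hand-rolled `stream.Transform` extractor (including its `Malformed extraction path` check and directory bookkeeping) in favor of the zip library's built-in directory extraction. A minimal standalone sketch of the new flow, assuming `unzip` is the `unzipper` package as imported in this file:

```typescript
import {createReadStream} from 'fs'
import * as unzip from 'unzipper'

// Minimal sketch: pipe a zip stream into a directory and settle the promise
// on the extractor's 'close'/'error' events, mirroring the diff above.
function extractTo(zipPath: string, directory: string): Promise<void> {
  return new Promise((resolve, reject) => {
    createReadStream(zipPath)
      .on('error', reject)
      .pipe(unzip.Extract({path: directory}))
      .on('close', () => resolve())
      .on('error', reject)
  })
}
```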

View File

@ -102,7 +102,6 @@ class ArtifactHttpClient implements Rpc {
} catch (error) {
if (error instanceof SyntaxError) {
debug(`Raw Body: ${rawBody}`)
throw error
}
if (error instanceof UsageError) {

View File

@ -1,4 +1,5 @@
import os from 'os'
import {info} from '@actions/core'
// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is use during upload to blob storage
@ -30,10 +31,10 @@ export function isGhes(): boolean {
const hostname = ghUrl.hostname.trimEnd().toUpperCase()
const isGitHubHost = hostname === 'GITHUB.COM'
const isGheHost =
hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
const isGheHost = hostname.endsWith('.GHE.COM')
const isLocalHost = hostname.endsWith('.LOCALHOST')
return !isGitHubHost && !isGheHost
return !isGitHubHost && !isGheHost && !isLocalHost
}
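
Because the removed and added lines sit side by side in this view, the resolved function can be hard to read; a standalone sketch of the post-change hostname classification (hypothetical helper name):

```typescript
// Sketch of the resolved isGhes() logic after this change (hypothetical copy).
function isGhesSketch(serverUrl = 'https://github.com'): boolean {
  const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')
  return !isGitHubHost && !isGheHost && !isLocalHost
}

// isGhesSketch('https://github.mycorp.com') // => true (GHES)
// isGhesSketch('https://foo.ghe.com')       // => false
```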
export function getGitHubWorkspaceDir(): string {
@ -44,16 +45,55 @@ export function getGitHubWorkspaceDir(): string {
return ghWorkspaceDir
}
// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
// The maximum value of concurrency is 300.
// This value can be changed with the ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY variable.
export function getConcurrency(): number {
const numCPUs = os.cpus().length
let concurrencyCap = 32
if (numCPUs <= 4) {
return 32
if (numCPUs > 4) {
const concurrency = 16 * numCPUs
concurrencyCap = concurrency > 300 ? 300 : concurrency
}
const concurrency = 16 * numCPUs
return concurrency > 300 ? 300 : concurrency
const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
if (concurrencyOverride) {
const concurrency = parseInt(concurrencyOverride)
if (isNaN(concurrency) || concurrency < 1) {
throw new Error(
'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
)
}
if (concurrency < concurrencyCap) {
info(
`Setting concurrency to the value set in ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY.`
)
return concurrency
}
info(
`ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of CPUs. Setting it to the maximum value allowed.`
)
return concurrencyCap
}
// default concurrency to 5
return 5
}
export function getUploadChunkTimeout(): number {
const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
if (!timeoutVar) {
return 300000 // 5 minutes
}
const timeout = parseInt(timeoutVar)
if (isNaN(timeout)) {
throw new Error(
'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
)
}
return timeout
}
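
For reference, both overrides are plain environment variables read at upload time; a hypothetical sketch of how a consumer might exercise them before calling the upload APIs:

```typescript
// Hypothetical usage sketch: values are validated by getConcurrency() and
// getUploadChunkTimeout() as shown above.
process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY'] = '10' // capped at 32, or 16 * CPUs (max 300) on larger machines
process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS'] = '600000' // 10 minutes instead of the 5-minute default
```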

View File

@ -12,6 +12,11 @@ export interface UploadArtifactResponse {
* This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
*/
id?: number
/**
* The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
*/
digest?: string
}
/**

View File

@ -1,7 +1,11 @@
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/core-http'
import {ZipUploadStream} from './zip'
import {getUploadChunkSize, getConcurrency} from '../shared/config'
import {
getUploadChunkSize,
getConcurrency,
getUploadChunkTimeout
} from '../shared/config'
import * as core from '@actions/core'
import * as crypto from 'crypto'
import * as stream from 'stream'
@ -24,6 +28,22 @@ export async function uploadZipToBlobStorage(
zipUploadStream: ZipUploadStream
): Promise<BlobUploadResponse> {
let uploadByteCount = 0
let lastProgressTime = Date.now()
const abortController = new AbortController()
const chunkTimer = async (interval: number): Promise<void> =>
new Promise((resolve, reject) => {
const timer = setInterval(() => {
if (Date.now() - lastProgressTime > interval) {
reject(new Error('Upload progress stalled.'))
}
}, interval)
abortController.signal.addEventListener('abort', () => {
clearInterval(timer)
resolve()
})
})
const maxConcurrency = getConcurrency()
const bufferSize = getUploadChunkSize()
@ -37,11 +57,13 @@ export async function uploadZipToBlobStorage(
const uploadCallback = (progress: TransferProgressEvent): void => {
core.info(`Uploaded bytes ${progress.loadedBytes}`)
uploadByteCount = progress.loadedBytes
lastProgressTime = Date.now()
}
const options: BlockBlobUploadStreamOptions = {
blobHTTPHeaders: {blobContentType: 'zip'},
onProgress: uploadCallback
onProgress: uploadCallback,
abortSignal: abortController.signal
}
let sha256Hash: string | undefined = undefined
@ -54,18 +76,22 @@ export async function uploadZipToBlobStorage(
core.info('Beginning upload of artifact content to blob storage')
try {
await blockBlobClient.uploadStream(
uploadStream,
bufferSize,
maxConcurrency,
options
)
await Promise.race([
blockBlobClient.uploadStream(
uploadStream,
bufferSize,
maxConcurrency,
options
),
chunkTimer(getUploadChunkTimeout())
])
} catch (error) {
if (NetworkError.isNetworkErrorCode(error?.code)) {
throw new NetworkError(error?.code)
}
throw error
} finally {
abortController.abort()
}
core.info('Finished uploading artifact content to blob storage!')
@ -79,7 +105,6 @@ export async function uploadZipToBlobStorage(
`No data was uploaded to blob storage. Reported upload byte count is 0.`
)
}
return {
uploadSize: uploadByteCount,
sha256Hash
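
The `chunkTimer`/`Promise.race` pairing above is a stall watchdog: the upload promise wins the race as long as the progress callback keeps refreshing `lastProgressTime`, and the `finally` block aborts the watchdog (resolving it) once the upload settles. A minimal self-contained sketch of the same pattern, with hypothetical names:

```typescript
// Minimal sketch of the stall-watchdog pattern (hypothetical helper).
function stallWatchdog(
  signal: AbortSignal,
  intervalMs: number,
  lastProgress: () => number
): Promise<void> {
  return new Promise((resolve, reject) => {
    const timer = setInterval(() => {
      if (Date.now() - lastProgress() > intervalMs) {
        clearInterval(timer)
        reject(new Error('Upload progress stalled.'))
      }
    }, intervalMs)
    // The racing work calls controller.abort() when it settles, ending the watchdog.
    signal.addEventListener('abort', () => {
      clearInterval(timer)
      resolve()
    })
  })
}

// Usage sketch:
// await Promise.race([doUpload(controller.signal), stallWatchdog(controller.signal, 300_000, () => lastProgressTime)])
```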

View File

@ -110,6 +110,7 @@ export async function uploadArtifact(
return {
size: uploadResult.uploadSize,
digest: uploadResult.sha256Hash,
id: Number(artifactId)
}
}

View File

@ -13,6 +13,12 @@ export interface UploadZipSpecification {
* The destination path in a zip for a file
*/
destinationPath: string
/**
* Information about the file
* https://nodejs.org/api/fs.html#class-fsstats
*/
stats: fs.Stats
}
/**
@ -75,10 +81,11 @@ export function getUploadZipSpecification(
- file3.txt
*/
for (let file of filesToZip) {
if (!fs.existsSync(file)) {
const stats = fs.lstatSync(file, {throwIfNoEntry: false})
if (!stats) {
throw new Error(`File ${file} does not exist`)
}
if (!fs.statSync(file).isDirectory()) {
if (!stats.isDirectory()) {
// Normalize and resolve, this allows for either absolute or relative paths to be used
file = normalize(file)
file = resolve(file)
@ -94,7 +101,8 @@ export function getUploadZipSpecification(
specification.push({
sourcePath: file,
destinationPath: uploadPath
destinationPath: uploadPath,
stats
})
} else {
// Empty directory
@ -103,7 +111,8 @@ export function getUploadZipSpecification(
specification.push({
sourcePath: null,
destinationPath: directoryPath
destinationPath: directoryPath,
stats
})
}
}

View File

@ -1,7 +1,7 @@
import * as stream from 'stream'
import {realpath} from 'fs/promises'
import * as archiver from 'archiver'
import * as core from '@actions/core'
import {createReadStream} from 'fs'
import {UploadZipSpecification} from './upload-zip-specification'
import {getUploadChunkSize} from '../shared/config'
@ -43,8 +43,14 @@ export async function createZipUploadStream(
for (const file of uploadSpecification) {
if (file.sourcePath !== null) {
// Add a normal file to the zip
zip.append(createReadStream(file.sourcePath), {
// Check if symlink and resolve the source path
let sourcePath = file.sourcePath
if (file.stats.isSymbolicLink()) {
sourcePath = await realpath(file.sourcePath)
}
// Add the file to the zip
zip.file(sourcePath, {
name: file.destinationPath
})
} else {
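
Together with the `lstatSync` change in the upload specification above, this resolves symlinked entries to their targets before zipping instead of streaming the link itself. A minimal sketch of the combined flow (hypothetical helper name):

```typescript
import {lstatSync} from 'fs'
import {realpath} from 'fs/promises'

// Sketch: stat without following links, then resolve the real target only
// when the entry is a symlink, mirroring the two diffs above.
async function resolveZipSourcePath(sourcePath: string): Promise<string> {
  const stats = lstatSync(sourcePath, {throwIfNoEntry: false})
  if (!stats) {
    throw new Error(`File ${sourcePath} does not exist`)
  }
  return stats.isSymbolicLink() ? await realpath(sourcePath) : sourcePath
}
```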

View File

@ -12,6 +12,9 @@ Once the attestation has been created and signed, it will be uploaded to the GH
attestations API and associated with the repository from which the workflow was
initiated.
See [Using artifact attestations to establish provenance for builds](https://docs.github.com/en/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)
for more information on artifact attestations.
## Usage
### `attest`
@ -29,8 +32,7 @@ async function run() {
const ghToken = core.getInput('gh-token');
const attestation = await attest({
subjectName: 'my-artifact-name',
subjectDigest: { 'sha256': '36ab4667...'},
subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}],
predicateType: 'https://in-toto.io/attestation/release',
predicate: { . . . },
token: ghToken
@ -46,11 +48,12 @@ The `attest` function supports the following options:
```typescript
export type AttestOptions = {
// The name of the subject to be attested.
subjectName: string
// The digest of the subject to be attested. Should be a map of digest
// algorithms to their hex-encoded values.
subjectDigest: Record<string, string>
// Deprecated. Use 'subjects' instead.
subjectName?: string
// Deprecated. Use 'subjects' instead.
subjectDigest?: Record<string, string>
// Collection of subjects to be attested
subjects?: Subject[]
// URI identifying the content type of the predicate being attested.
predicateType: string
// Predicate to be attested.
@ -60,9 +63,18 @@ export type AttestOptions = {
// Sigstore instance to use for signing. Must be one of "public-good" or
// "github".
sigstore?: 'public-good' | 'github'
// HTTP headers to include in request to attestations API.
headers?: {[header: string]: string | number | undefined}
// Whether to skip writing the attestation to the GH attestations API.
skipWrite?: boolean
}
export type Subject = {
// Name of the subject.
name: string
// Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
digest: Record<string, string>
}
```
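
Since `subjectName`/`subjectDigest` are now deprecated in favor of `subjects`, a hypothetical multi-subject call (digest values truncated for illustration) might look like:

```typescript
import {attest} from '@actions/attest'

// Hypothetical sketch: one attestation covering two release artifacts.
const attestation = await attest({
  subjects: [
    {name: 'my-cli-linux', digest: {sha256: '36ab4667...'}},
    {name: 'my-cli-darwin', digest: {sha256: '9f86d081...'}}
  ],
  predicateType: 'https://in-toto.io/attestation/release',
  predicate: {},
  token: process.env.GITHUB_TOKEN ?? ''
})
```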
### `attestProvenance`
@ -100,16 +112,19 @@ The `attestProvenance` function supports the following options:
```typescript
export type AttestProvenanceOptions = {
// The name of the subject to be attested.
subjectName: string
// The digest of the subject to be attested. Should be a map of digest
// algorithms to their hex-encoded values.
subjectDigest: Record<string, string>
// GitHub token for writing attestations.
// Deprecated. Use 'subjects' instead.
subjectName?: string
// Deprecated. Use 'subjects' instead.
subjectDigest?: Record<string, string>
// Collection of subjects to be attested
subjects?: Subject[]
// URI identifying the content type of the predicate being attested.
token: string
// Sigstore instance to use for signing. Must be one of "public-good" or
// "github".
sigstore?: 'public-good' | 'github'
// HTTP headers to include in request to attestations API.
headers?: {[header: string]: string | number | undefined}
// Whether to skip writing the attestation to the GH attestations API.
skipWrite?: boolean
// Issuer URL responsible for minting the OIDC token from which the

View File

@ -1,8 +1,51 @@
# @actions/attest Releases
### 1.5.0
- Bump @actions/core from 1.10.1 to 1.11.1 [#1847](https://github.com/actions/toolkit/pull/1847)
- Bump @sigstore/bundle from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
- Bump @sigstore/sign from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
- Support for generating multi-subject attestations [#1864](https://github.com/actions/toolkit/pull/1865)
- Fix bug in `buildSLSAProvenancePredicate` related to `workflow_ref` OIDC token claims containing the "@" symbol in the tag name [#1863](https://github.com/actions/toolkit/pull/1863)
### 1.4.2
- Fix bug in `buildSLSAProvenancePredicate`/`attestProvenance` when generating provenance statement for enterprise account using customized OIDC issuer value [#1823](https://github.com/actions/toolkit/pull/1823)
### 1.4.1
- Bump @actions/http-client from 2.2.1 to 2.2.3 [#1805](https://github.com/actions/toolkit/pull/1805)
### 1.4.0
- Add new `headers` parameter to the `attest` and `attestProvenance` functions [#1790](https://github.com/actions/toolkit/pull/1790)
- Update `buildSLSAProvenancePredicate`/`attestProvenance` to automatically derive default OIDC issuer URL from current execution context [#1796](https://github.com/actions/toolkit/pull/1796)
### 1.3.1
- Fix bug with proxy support when retrieving JWKS for OIDC issuer [#1776](https://github.com/actions/toolkit/pull/1776)
### 1.3.0
- Dynamic construction of Sigstore API URLs [#1735](https://github.com/actions/toolkit/pull/1735)
- Switch to new GH provenance build type [#1745](https://github.com/actions/toolkit/pull/1745)
- Fetch existing Rekor entry on 409 conflict error [#1759](https://github.com/actions/toolkit/pull/1759)
- Bump @sigstore/bundle from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
- Bump @sigstore/sign from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
### 1.2.1
- Retry request on attestation persistence failure [#1725](https://github.com/actions/toolkit/pull/1725)
### 1.2.0
- Generate attestations using the v0.3 Sigstore bundle format [#1701](https://github.com/actions/toolkit/pull/1701)
- Bump @sigstore/bundle from 2.2.0 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
- Bump @sigstore/sign from 2.2.3 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
- Remove dependency on make-fetch-happen [#1714](https://github.com/actions/toolkit/pull/1714)
### 1.1.0
- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement.
- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement [#1693](https://github.com/actions/toolkit/pull/1693)
### 1.0.0

View File

@ -1,15 +1,15 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
exports[`provenance functions buildSLSAProvenancePredicate handle tags including "@" character 1`] = `
{
"params": {
"buildDefinition": {
"buildType": "https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1",
"buildType": "https://actions.github.io/buildtypes/workflow/v1",
"externalParameters": {
"workflow": {
"path": ".github/workflows/main.yml",
"ref": "main",
"repository": "https://github.com/owner/repo",
"ref": "foo@1.0.0",
"repository": "https://foo.ghe.com/owner/repo",
},
},
"internalParameters": {
@ -17,6 +17,7 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
"event_name": "push",
"repository_id": "repo-id",
"repository_owner_id": "owner-id",
"runner_environment": "github-hosted",
},
},
"resolvedDependencies": [
@ -24,16 +25,58 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
"digest": {
"gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
},
"uri": "git+https://github.com/owner/repo@refs/heads/main",
"uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
},
],
},
"runDetails": {
"builder": {
"id": "https://github.com/actions/runner/github-hosted",
"id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
},
"metadata": {
"invocationId": "https://github.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
"invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
},
},
},
"type": "https://slsa.dev/provenance/v1",
}
`;
exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
{
"params": {
"buildDefinition": {
"buildType": "https://actions.github.io/buildtypes/workflow/v1",
"externalParameters": {
"workflow": {
"path": ".github/workflows/main.yml",
"ref": "main",
"repository": "https://foo.ghe.com/owner/repo",
},
},
"internalParameters": {
"github": {
"event_name": "push",
"repository_id": "repo-id",
"repository_owner_id": "owner-id",
"runner_environment": "github-hosted",
},
},
"resolvedDependencies": [
{
"digest": {
"gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
},
"uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
},
],
},
"runDetails": {
"builder": {
"id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
},
"metadata": {
"invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
},
},
},

View File

@ -0,0 +1,16 @@
import {attest} from '../src/attest'
describe('attest', () => {
describe('when no subject information is provided', () => {
it('throws an error', async () => {
const options = {
predicateType: 'foo',
predicate: {bar: 'baz'},
token: 'token'
}
expect(attest(options)).rejects.toThrowError(
'Must provide either subjectName and subjectDigest or subjects'
)
})
})
})

View File

@ -0,0 +1,41 @@
import {signingEndpoints} from '../src/endpoints'
describe('signingEndpoints', () => {
const originalEnv = process.env
afterEach(() => {
process.env = originalEnv
})
describe('when using github.com', () => {
beforeEach(async () => {
process.env = {
...originalEnv,
GITHUB_SERVER_URL: 'https://github.com'
}
})
it('returns expected endpoints', async () => {
const endpoints = signingEndpoints('github')
expect(endpoints.fulcioURL).toEqual('https://fulcio.githubapp.com')
expect(endpoints.tsaServerURL).toEqual('https://timestamp.githubapp.com')
})
})
describe('when using custom domain', () => {
beforeEach(async () => {
process.env = {
...originalEnv,
GITHUB_SERVER_URL: 'https://foo.bar.com'
}
})
it('returns expected endpoints', async () => {
const endpoints = signingEndpoints('github')
expect(endpoints.fulcioURL).toEqual('https://fulcio.foo.bar.com')
expect(endpoints.tsaServerURL).toEqual('https://timestamp.foo.bar.com')
})
})
})

View File

@ -17,7 +17,7 @@ describe('buildIntotoStatement', () => {
}
it('returns an intoto statement', () => {
const statement = buildIntotoStatement(subject, predicate)
const statement = buildIntotoStatement([subject], predicate)
expect(statement).toMatchSnapshot()
})
})

View File

@ -45,7 +45,8 @@ describe('getIDTokenClaims', () => {
sha: 'sha',
repository: 'repo',
event_name: 'push',
workflow_ref: 'main',
job_workflow_ref: 'job_workflow_ref',
workflow_ref: 'workflow',
repository_id: '1',
repository_owner_id: '1',
runner_environment: 'github-hosted',
@ -67,6 +68,55 @@ describe('getIDTokenClaims', () => {
})
})
describe('when ID token is valid (w/ enterprise slug)', () => {
const claims = {
iss: `${issuer}/foo-bar`,
aud: audience,
ref: 'ref',
sha: 'sha',
repository: 'repo',
event_name: 'push',
job_workflow_ref: 'job_workflow_ref',
workflow_ref: 'workflow',
repository_id: '1',
repository_owner_id: '1',
runner_environment: 'github-hosted',
run_id: '1',
run_attempt: '1'
}
beforeEach(async () => {
const jwt = await new jose.SignJWT(claims)
.setProtectedHeader({alg: 'PS256'})
.sign(key.privateKey)
nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
})
it('returns the ID token claims', async () => {
const result = await getIDTokenClaims(issuer)
expect(result).toEqual(claims)
})
})
describe('when ID token is missing the "iss" claim', () => {
const claims = {
aud: audience
}
beforeEach(async () => {
const jwt = await new jose.SignJWT(claims)
.setProtectedHeader({alg: 'PS256'})
.sign(key.privateKey)
nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
})
it('throws an error', async () => {
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing "iss"/i)
})
})
describe('when ID token is missing required claims', () => {
const claims = {
iss: issuer,
@ -98,7 +148,9 @@ describe('getIDTokenClaims', () => {
})
it('throws an error', async () => {
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/issuer invalid/)
await expect(getIDTokenClaims(issuer)).rejects.toThrow(
/unexpected "iss"/i
)
})
})
@ -114,7 +166,7 @@ describe('getIDTokenClaims', () => {
})
it('throw an error', async () => {
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/audience invalid/)
await expect(getIDTokenClaims(issuer)).rejects.toThrow(/unexpected "aud"/)
})
})

View File

@ -2,22 +2,28 @@ import * as github from '@actions/github'
import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import * as jose from 'jose'
import nock from 'nock'
import {SIGSTORE_GITHUB, SIGSTORE_PUBLIC_GOOD} from '../src/endpoints'
import {MockAgent, setGlobalDispatcher} from 'undici'
import {SIGSTORE_PUBLIC_GOOD, signingEndpoints} from '../src/endpoints'
import {attestProvenance, buildSLSAProvenancePredicate} from '../src/provenance'
describe('provenance functions', () => {
const originalEnv = process.env
const issuer = 'https://example.com'
const issuer = 'https://token.actions.foo.ghe.com'
const audience = 'nobody'
const jwksPath = '/.well-known/jwks.json'
const tokenPath = '/token'
// MockAgent for mocking @actions/github
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const claims = {
iss: issuer,
aud: 'nobody',
repository: 'owner/repo',
ref: 'refs/heads/main',
sha: 'babca52ab0c93ae16539e5923cb0d7403b9a093b',
job_workflow_ref: 'owner/workflows/.github/workflows/publish.yml@main',
workflow_ref: 'owner/repo/.github/workflows/main.yml@main',
event_name: 'push',
repository_id: 'repo-id',
@ -27,15 +33,7 @@ describe('provenance functions', () => {
runner_environment: 'github-hosted'
}
beforeEach(async () => {
process.env = {
...originalEnv,
ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
GITHUB_SERVER_URL: 'https://github.com',
GITHUB_REPOSITORY: claims.repository
}
const mockIssuer = async (claims: jose.JWTPayload): Promise<void> => {
// Generate JWT signing key
const key = await jose.generateKeyPair('PS256')
@ -54,6 +52,18 @@ describe('provenance functions', () => {
// Mock OIDC token endpoint for populating the provenance
nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
}
beforeEach(async () => {
process.env = {
...originalEnv,
ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
GITHUB_SERVER_URL: 'https://foo.ghe.com',
GITHUB_REPOSITORY: claims.repository
}
await mockIssuer(claims)
})
afterEach(() => {
@ -62,7 +72,17 @@ describe('provenance functions', () => {
describe('buildSLSAProvenancePredicate', () => {
it('returns a provenance hydrated from an OIDC token', async () => {
const predicate = await buildSLSAProvenancePredicate(issuer)
const predicate = await buildSLSAProvenancePredicate()
expect(predicate).toMatchSnapshot()
})
it('handle tags including "@" character', async () => {
nock.cleanAll()
await mockIssuer({
...claims,
workflow_ref: 'owner/repo/.github/workflows/main.yml@foo@1.0.0'
})
const predicate = await buildSLSAProvenancePredicate()
expect(predicate).toMatchSnapshot()
})
})
@ -90,27 +110,28 @@ describe('provenance functions', () => {
})
describe('when using the github Sigstore instance', () => {
const {fulcioURL, tsaServerURL} = SIGSTORE_GITHUB
beforeEach(async () => {
const {fulcioURL, tsaServerURL} = signingEndpoints('github')
// Mock Sigstore
await mockFulcio({baseURL: fulcioURL, strict: false})
await mockTSA({baseURL: tsaServerURL})
// Mock GH attestations API
nock('https://api.github.com')
.post(/^\/repos\/.*\/.*\/attestations$/)
mockAgent
.get('https://api.github.com')
.intercept({
path: /^\/repos\/.*\/.*\/attestations$/,
method: 'post'
})
.reply(201, {id: attestationID})
})
describe('when the sigstore instance is explicitly set', () => {
it('attests provenance', async () => {
const attestation = await attestProvenance({
subjectName,
subjectDigest,
subjects: [{name: subjectName, digest: subjectDigest}],
token: 'token',
sigstore: 'github',
issuer
sigstore: 'github'
})
expect(attestation).toBeDefined()
@ -135,10 +156,8 @@ describe('provenance functions', () => {
it('attests provenance', async () => {
const attestation = await attestProvenance({
subjectName,
subjectDigest,
token: 'token',
issuer
subjects: [{name: subjectName, digest: subjectDigest}],
token: 'token'
})
expect(attestation).toBeDefined()
@ -159,19 +178,21 @@ describe('provenance functions', () => {
await mockRekor({baseURL: rekorURL})
// Mock GH attestations API
nock('https://api.github.com')
.post(/^\/repos\/.*\/.*\/attestations$/)
mockAgent
.get('https://api.github.com')
.intercept({
path: /^\/repos\/.*\/.*\/attestations$/,
method: 'post'
})
.reply(201, {id: attestationID})
})
describe('when the sigstore instance is explicitly set', () => {
it('attests provenance', async () => {
const attestation = await attestProvenance({
subjectName,
subjectDigest,
subjects: [{name: subjectName, digest: subjectDigest}],
token: 'token',
sigstore: 'public-good',
issuer
sigstore: 'public-good'
})
expect(attestation).toBeDefined()
@ -196,10 +217,8 @@ describe('provenance functions', () => {
it('attests provenance', async () => {
const attestation = await attestProvenance({
subjectName,
subjectDigest,
token: 'token',
issuer
subjects: [{name: subjectName, digest: subjectDigest}],
token: 'token'
})
expect(attestation).toBeDefined()
@ -225,8 +244,7 @@ describe('provenance functions', () => {
subjectDigest,
token: 'token',
sigstore: 'public-good',
skipWrite: true,
issuer
skipWrite: true
})
expect(attestation).toBeDefined()

View File

@ -64,13 +64,11 @@ describe('signProvenance', () => {
expect(att).toBeDefined()
expect(att.mediaType).toEqual(
'application/vnd.dev.sigstore.bundle+json;version=0.2'
'application/vnd.dev.sigstore.bundle.v0.3+json'
)
expect(att.content.$case).toEqual('dsseEnvelope')
expect(att.verificationMaterial.content.$case).toEqual(
'x509CertificateChain'
)
expect(att.verificationMaterial.content.$case).toEqual('certificate')
expect(att.verificationMaterial.tlogEntries).toHaveLength(1)
expect(
att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
@ -89,13 +87,11 @@ describe('signProvenance', () => {
expect(att).toBeDefined()
expect(att.mediaType).toEqual(
'application/vnd.dev.sigstore.bundle+json;version=0.2'
'application/vnd.dev.sigstore.bundle.v0.3+json'
)
expect(att.content.$case).toEqual('dsseEnvelope')
expect(att.verificationMaterial.content.$case).toEqual(
'x509CertificateChain'
)
expect(att.verificationMaterial.content.$case).toEqual('certificate')
expect(att.verificationMaterial.tlogEntries).toHaveLength(0)
expect(
att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps

View File

@ -1,10 +1,14 @@
import nock from 'nock'
import {MockAgent, setGlobalDispatcher} from 'undici'
import {writeAttestation} from '../src/store'
describe('writeAttestation', () => {
const originalEnv = process.env
const attestation = {foo: 'bar '}
const token = 'token'
const headers = {'X-GitHub-Foo': 'true'}
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
beforeEach(() => {
process.env = {
@ -19,27 +23,71 @@ describe('writeAttestation', () => {
describe('when the api call is successful', () => {
beforeEach(() => {
nock('https://api.github.com')
.matchHeader('authorization', `token ${token}`)
.post('/repos/foo/bar/attestations', {bundle: attestation})
mockAgent
.get('https://api.github.com')
.intercept({
path: '/repos/foo/bar/attestations',
method: 'POST',
headers: {authorization: `token ${token}`, ...headers},
body: JSON.stringify({bundle: attestation})
})
.reply(201, {id: '123'})
})
it('persists the attestation', async () => {
await expect(
writeAttestation(attestation, token, {headers})
).resolves.toEqual('123')
})
})
describe('when the api call fails', () => {
beforeEach(() => {
mockAgent
.get('https://api.github.com')
.intercept({
path: '/repos/foo/bar/attestations',
method: 'POST',
headers: {authorization: `token ${token}`},
body: JSON.stringify({bundle: attestation})
})
.reply(500, 'oops')
})
it('throws an error', async () => {
await expect(
writeAttestation(attestation, token, {retry: 0})
).rejects.toThrow(/oops/)
})
})
describe('when the api call fails but succeeds on retry', () => {
beforeEach(() => {
const pool = mockAgent.get('https://api.github.com')
pool
.intercept({
path: '/repos/foo/bar/attestations',
method: 'POST',
headers: {authorization: `token ${token}`},
body: JSON.stringify({bundle: attestation})
})
.reply(500, 'oops')
.times(1)
pool
.intercept({
path: '/repos/foo/bar/attestations',
method: 'POST',
headers: {authorization: `token ${token}`},
body: JSON.stringify({bundle: attestation})
})
.reply(201, {id: '123'})
.times(1)
})
it('persists the attestation', async () => {
await expect(writeAttestation(attestation, token)).resolves.toEqual('123')
})
})
describe('when the api call fails', () => {
beforeEach(() => {
nock('https://api.github.com')
.matchHeader('authorization', `token ${token}`)
.post('/repos/foo/bar/attestations', {bundle: attestation})
.reply(500, 'oops')
})
it('throws an error', async () => {
await expect(writeAttestation(attestation, token)).rejects.toThrow(/oops/)
})
})
})

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@actions/attest",
"version": "1.1.0",
"version": "1.5.0",
"description": "Actions attestation lib",
"keywords": [
"github",
@ -35,21 +35,24 @@
"url": "https://github.com/actions/toolkit/issues"
},
"devDependencies": {
"@sigstore/mock": "^0.6.5",
"@sigstore/rekor-types": "^2.0.0",
"@sigstore/mock": "^0.8.0",
"@sigstore/rekor-types": "^3.0.0",
"@types/jsonwebtoken": "^9.0.6",
"@types/make-fetch-happen": "^10.0.4",
"jose": "^5.2.3",
"nock": "^13.5.1"
"nock": "^13.5.1",
"undici": "^5.28.4"
},
"dependencies": {
"@actions/core": "^1.10.1",
"@actions/core": "^1.11.1",
"@actions/github": "^6.0.0",
"@actions/http-client": "^2.2.1",
"@sigstore/bundle": "^2.2.0",
"@sigstore/sign": "^2.2.3",
"jsonwebtoken": "^9.0.2",
"jwks-rsa": "^3.1.0",
"make-fetch-happen": "^13.0.0"
"@actions/http-client": "^2.2.3",
"@octokit/plugin-retry": "^6.0.1",
"@sigstore/bundle": "^3.0.0",
"@sigstore/sign": "^3.0.0",
"jose": "^5.2.3"
},
"overrides": {
"@octokit/plugin-retry": {
"@octokit/core": "^5.2.0"
}
}
}

View File

@ -1,10 +1,11 @@
import {Bundle, bundleToJSON} from '@sigstore/bundle'
import {bundleToJSON} from '@sigstore/bundle'
import {X509Certificate} from 'crypto'
import {SigstoreInstance, signingEndpoints} from './endpoints'
import {buildIntotoStatement} from './intoto'
import {Payload, signPayload} from './sign'
import {writeAttestation} from './store'
import type {Bundle} from '@sigstore/sign'
import type {Attestation, Predicate, Subject} from './shared.types'
const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
@ -13,11 +14,16 @@ const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
* Options for attesting a subject / predicate.
*/
export type AttestOptions = {
// The name of the subject to be attested.
subjectName: string
// The digest of the subject to be attested. Should be a map of digest
// algorithms to their hex-encoded values.
subjectDigest: Record<string, string>
/**
* @deprecated Use `subjects` instead.
**/
subjectName?: string
/**
* @deprecated Use `subjects` instead.
**/
subjectDigest?: Record<string, string>
// Subjects to be attested.
subjects?: Subject[]
// Content type of the predicate being attested.
predicateType: string
// Predicate to be attested.
@ -27,6 +33,8 @@ export type AttestOptions = {
// Sigstore instance to use for signing. Must be one of "public-good" or
// "github".
sigstore?: SigstoreInstance
// HTTP headers to include in request to attestations API.
headers?: {[header: string]: string | number | undefined}
// Whether to skip writing the attestation to the GH attestations API.
skipWrite?: boolean
}
@ -39,15 +47,24 @@ export type AttestOptions = {
* @returns A promise that resolves to the attestation.
*/
export async function attest(options: AttestOptions): Promise<Attestation> {
const subject: Subject = {
name: options.subjectName,
digest: options.subjectDigest
let subjects: Subject[]
if (options.subjects) {
subjects = options.subjects
} else if (options.subjectName && options.subjectDigest) {
subjects = [{name: options.subjectName, digest: options.subjectDigest}]
} else {
throw new Error(
'Must provide either subjectName and subjectDigest or subjects'
)
}
const predicate: Predicate = {
type: options.predicateType,
params: options.predicate
}
const statement = buildIntotoStatement(subject, predicate)
const statement = buildIntotoStatement(subjects, predicate)
// Sign the provenance statement
const payload: Payload = {
@ -60,7 +77,11 @@ export async function attest(options: AttestOptions): Promise<Attestation> {
// Store the attestation
let attestationID: string | undefined
if (options.skipWrite !== true) {
attestationID = await writeAttestation(bundleToJSON(bundle), options.token)
attestationID = await writeAttestation(
bundleToJSON(bundle),
options.token,
{headers: options.headers}
)
}
return toAttestation(bundle, attestationID)

View File

@ -6,9 +6,6 @@ const GITHUB_ID = 'github'
const FULCIO_PUBLIC_GOOD_URL = 'https://fulcio.sigstore.dev'
const REKOR_PUBLIC_GOOD_URL = 'https://rekor.sigstore.dev'
const FULCIO_INTERNAL_URL = 'https://fulcio.githubapp.com'
const TSA_INTERNAL_URL = 'https://timestamp.githubapp.com'
export type SigstoreInstance = typeof PUBLIC_GOOD_ID | typeof GITHUB_ID
export type Endpoints = {
@ -22,11 +19,6 @@ export const SIGSTORE_PUBLIC_GOOD: Endpoints = {
rekorURL: REKOR_PUBLIC_GOOD_URL
}
export const SIGSTORE_GITHUB: Endpoints = {
fulcioURL: FULCIO_INTERNAL_URL,
tsaServerURL: TSA_INTERNAL_URL
}
export const signingEndpoints = (sigstore?: SigstoreInstance): Endpoints => {
let instance: SigstoreInstance
@ -45,6 +37,19 @@ export const signingEndpoints = (sigstore?: SigstoreInstance): Endpoints => {
case PUBLIC_GOOD_ID:
return SIGSTORE_PUBLIC_GOOD
case GITHUB_ID:
return SIGSTORE_GITHUB
return buildGitHubEndpoints()
}
}
function buildGitHubEndpoints(): Endpoints {
const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
let host = new URL(serverURL).hostname
if (host === 'github.com') {
host = 'githubapp.com'
}
return {
fulcioURL: `https://fulcio.${host}`,
tsaServerURL: `https://timestamp.${host}`
}
}

View File

@ -20,12 +20,12 @@ export type InTotoStatement = {
* @returns The constructed in-toto statement.
*/
export const buildIntotoStatement = (
subject: Subject,
subjects: Subject[],
predicate: Predicate
): InTotoStatement => {
return {
_type: INTOTO_STATEMENT_V1_TYPE,
subject: [subject],
subject: subjects,
predicateType: predicate.type,
predicate: predicate.params
}

View File

@ -1,16 +1,21 @@
import {getIDToken} from '@actions/core'
import {HttpClient} from '@actions/http-client'
import * as jwt from 'jsonwebtoken'
import jwks from 'jwks-rsa'
import * as jose from 'jose'
const OIDC_AUDIENCE = 'nobody'
const VALID_SERVER_URLS = [
'https://github.com',
new RegExp('^https://[a-z0-9-]+\\.ghe\\.com$')
] as const
const REQUIRED_CLAIMS = [
'iss',
'ref',
'sha',
'repository',
'event_name',
'job_workflow_ref',
'workflow_ref',
'repository_id',
'repository_owner_id',
@ -25,7 +30,8 @@ type OIDCConfig = {
jwks_uri: string
}
export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
export const getIDTokenClaims = async (issuer?: string): Promise<ClaimSet> => {
issuer = issuer || getIssuer()
try {
const token = await getIDToken(OIDC_AUDIENCE)
const claims = await decodeOIDCToken(token, issuer)
@ -39,55 +45,46 @@ export const getIDTokenClaims = async (issuer: string): Promise<ClaimSet> => {
const decodeOIDCToken = async (
token: string,
issuer: string
): Promise<jwt.JwtPayload> => {
): Promise<jose.JWTPayload> => {
// Verify and decode token
return new Promise((resolve, reject) => {
jwt.verify(
token,
getPublicKey(issuer),
{audience: OIDC_AUDIENCE, issuer},
(err, decoded) => {
if (err) {
reject(err)
} else if (!decoded || typeof decoded === 'string') {
reject(new Error('No decoded token'))
} else {
resolve(decoded)
}
}
)
const jwks = jose.createLocalJWKSet(await getJWKS(issuer))
const {payload} = await jose.jwtVerify(token, jwks, {
audience: OIDC_AUDIENCE
})
}
// Returns a callback to locate the public key for the given JWT header. This
// involves two calls:
// 1. Fetch the OpenID configuration to get the JWKS URI.
// 2. Fetch the public key from the JWKS URI.
const getPublicKey =
(issuer: string): jwt.GetPublicKeyOrSecret =>
(header: jwt.JwtHeader, callback: jwt.SigningKeyCallback) => {
// Look up the JWKS URI from the issuer's OpenID configuration
new HttpClient('actions/attest')
.getJson<OIDCConfig>(`${issuer}/.well-known/openid-configuration`)
.then(data => {
if (!data.result) {
callback(new Error('No OpenID configuration found'))
} else {
// Fetch the public key from the JWKS URI
jwks({jwksUri: data.result.jwks_uri}).getSigningKey(
header.kid,
(err, key) => {
callback(err, key?.getPublicKey())
}
)
}
})
.catch(err => {
callback(err)
})
if (!payload.iss) {
throw new Error('Missing "iss" claim')
}
function assertClaimSet(claims: jwt.JwtPayload): asserts claims is ClaimSet {
// Check that the issuer STARTS WITH the expected issuer URL to account for
// the fact that the value may include an enterprise-specific slug
if (!payload.iss.startsWith(issuer)) {
throw new Error(`Unexpected "iss" claim: ${payload.iss}`)
}
return payload
}
const getJWKS = async (issuer: string): Promise<jose.JSONWebKeySet> => {
const client = new HttpClient('@actions/attest')
const config = await client.getJson<OIDCConfig>(
`${issuer}/.well-known/openid-configuration`
)
if (!config.result) {
throw new Error('No OpenID configuration found')
}
const jwks = await client.getJson<jose.JSONWebKeySet>(config.result.jwks_uri)
if (!jwks.result) {
throw new Error('No JWKS found for issuer')
}
return jwks.result
}
function assertClaimSet(claims: jose.JWTPayload): asserts claims is ClaimSet {
const missingClaims: string[] = []
for (const claim of REQUIRED_CLAIMS) {
@ -100,3 +97,21 @@ function assertClaimSet(claims: jwt.JwtPayload): asserts claims is ClaimSet {
throw new Error(`Missing claims: ${missingClaims.join(', ')}`)
}
}
// Derive the current OIDC issuer based on the server URL
function getIssuer(): string {
const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
// Ensure the server URL is a valid GitHub server URL
if (!VALID_SERVER_URLS.some(valid_url => serverURL.match(valid_url))) {
throw new Error(`Invalid server URL: ${serverURL}`)
}
let host = new URL(serverURL).hostname
if (host === 'github.com') {
host = 'githubusercontent.com'
}
return `https://token.actions.${host}`
}
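
The move from `jsonwebtoken`/`jwks-rsa` to `jose` collapses the callback-based key lookup into two calls; a minimal sketch of the verification flow, assuming the JWKS document has already been fetched:

```typescript
import * as jose from 'jose'

// Minimal sketch (assumed inputs): verify the token against a local JWKS and
// check the issuer prefix manually to tolerate enterprise-specific slugs.
async function verifyOIDCTokenSketch(
  token: string,
  issuer: string,
  jwksJson: jose.JSONWebKeySet
): Promise<jose.JWTPayload> {
  const jwks = jose.createLocalJWKSet(jwksJson)
  const {payload} = await jose.jwtVerify(token, jwks, {audience: 'nobody'})
  if (!payload.iss?.startsWith(issuer)) {
    throw new Error(`Unexpected "iss" claim: ${payload.iss}`)
  }
  return payload
}
```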

View File

@ -3,12 +3,7 @@ import {getIDTokenClaims} from './oidc'
import type {Attestation, Predicate} from './shared.types'
const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1'
const GITHUB_BUILDER_ID_PREFIX = 'https://github.com/actions/runner'
const GITHUB_BUILD_TYPE =
'https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1'
const DEFAULT_ISSUER = 'https://token.actions.githubusercontent.com'
const GITHUB_BUILD_TYPE = 'https://actions.github.io/buildtypes/workflow/v1'
export type AttestProvenanceOptions = Omit<
AttestOptions,
@ -27,7 +22,7 @@ export type AttestProvenanceOptions = Omit<
* @returns The SLSA provenance predicate.
*/
export const buildSLSAProvenancePredicate = async (
issuer: string = DEFAULT_ISSUER
issuer?: string
): Promise<Predicate> => {
const serverURL = process.env.GITHUB_SERVER_URL
const claims = await getIDTokenClaims(issuer)
@ -35,9 +30,11 @@ export const buildSLSAProvenancePredicate = async (
// Split just the path and ref from the workflow string.
// owner/repo/.github/workflows/main.yml@main =>
// .github/workflows/main.yml, main
const [workflowPath, workflowRef] = claims.workflow_ref
const [workflowPath, ...workflowRefChunks] = claims.workflow_ref
.replace(`${claims.repository}/`, '')
.split('@')
// Handle the case where the tag contains `@` (e.g. when using changesets in a monorepo context).
const workflowRef = workflowRefChunks.join('@')
return {
type: SLSA_PREDICATE_V1_TYPE,
@ -55,7 +52,8 @@ export const buildSLSAProvenancePredicate = async (
github: {
event_name: claims.event_name,
repository_id: claims.repository_id,
repository_owner_id: claims.repository_owner_id
repository_owner_id: claims.repository_owner_id,
runner_environment: claims.runner_environment
}
},
resolvedDependencies: [
@ -69,7 +67,7 @@ export const buildSLSAProvenancePredicate = async (
},
runDetails: {
builder: {
id: `${GITHUB_BUILDER_ID_PREFIX}/${claims.runner_environment}`
id: `${serverURL}/${claims.job_workflow_ref}`
},
metadata: {
invocationId: `${serverURL}/${claims.repository}/actions/runs/${claims.run_id}/attempts/${claims.run_attempt}`
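
The destructuring change above is what makes tags containing `@` survive: only the first `@` separates the workflow path from the ref, and the remaining chunks are re-joined. A standalone sketch with an example claim value:

```typescript
// Sketch of the split-and-rejoin from the diff above (example claim value).
const repository = 'owner/repo'
const workflowRefClaim = 'owner/repo/.github/workflows/main.yml@foo@1.0.0'

const [workflowPath, ...workflowRefChunks] = workflowRefClaim
  .replace(`${repository}/`, '')
  .split('@')
const workflowRef = workflowRefChunks.join('@')
// workflowPath => '.github/workflows/main.yml', workflowRef => 'foo@1.0.0'
```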

View File

@ -1,5 +1,5 @@
import {Bundle} from '@sigstore/bundle'
import {
Bundle,
BundleBuilder,
CIContextProvider,
DSSEBundleBuilder,
@ -86,7 +86,7 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
witnesses.push(
new RekorWitness({
rekorBaseURL: opts.rekorURL,
entryType: 'dsse',
fetchOnConflict: true,
timeout,
retry
})
@ -103,5 +103,7 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
)
}
// Build the bundle with the singleCertificate option which will
// trigger the creation of v0.3 DSSE bundles
return new DSSEBundleBuilder({signer, witnesses})
}

View File

@ -1,8 +1,14 @@
import * as github from '@actions/github'
import fetch from 'make-fetch-happen'
import {retry} from '@octokit/plugin-retry'
import {RequestHeaders} from '@octokit/types'
const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
const DEFAULT_RETRY_COUNT = 5
export type WriteOptions = {
retry?: number
headers?: RequestHeaders
}
/**
* Writes an attestation to the repository's attestations endpoint.
* @param attestation - The attestation to write.
@ -12,18 +18,25 @@ const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
*/
export const writeAttestation = async (
attestation: unknown,
token: string
token: string,
options: WriteOptions = {}
): Promise<string> => {
const octokit = github.getOctokit(token, {request: {fetch}})
const retries = options.retry ?? DEFAULT_RETRY_COUNT
const octokit = github.getOctokit(token, {retry: {retries}}, retry)
try {
const response = await octokit.request(CREATE_ATTESTATION_REQUEST, {
owner: github.context.repo.owner,
repo: github.context.repo.repo,
headers: options.headers,
data: {bundle: attestation}
})
return response.data?.id
const data =
typeof response.data == 'string'
? JSON.parse(response.data)
: response.data
return data?.id
} catch (err) {
const message = err instanceof Error ? err.message : err
throw new Error(`Failed to persist attestation: ${message}`)
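
Swapping `make-fetch-happen` for `@octokit/plugin-retry` means transient failures are retried inside Octokit itself; a minimal sketch of constructing such a client (token value hypothetical):

```typescript
import * as github from '@actions/github'
import {retry} from '@octokit/plugin-retry'

// Sketch: an Octokit instance that retries transient errors up to 5 times
// before surfacing them, as in writeAttestation above.
const octokit = github.getOctokit('ghp_example_token', {retry: {retries: 5}}, retry)
```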

View File

@ -6,6 +6,20 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac
Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.
## ⚠️ Important changes
The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
The new service will gradually roll out as of **February 1st, 2025**, and the legacy service will be sunset on the same date. Changes in this release are **fully backward compatible**.
**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible, before **February 1st, 2025**.
If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
Upgrading to the recommended version should not break your workflows or require any changes beyond updating your `package.json` to version `4.0.0`.
Read more about the change and access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
## Usage
This package is used by the v2+ versions of our first-party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).
@ -47,5 +61,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
A cache is downloaded in multiple segments of fixed size (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes a segment download gets stuck, which causes the workflow job to hang forever and fail. Version `v3.0.4` of the cache package introduced a segment download timeout that allows a stuck segment download to be aborted so the job can proceed with a cache miss.
The default value of this timeout is 10 minutes (as of `v3.2.1`; it was 60 minutes in versions `v3.0.4` through `v3.2.0`, both inclusive) and can be customized by setting an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` to the timeout value in minutes.
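As a rough sketch (an assumption about shape, not the package's actual implementation), resolving that timeout could look like this:

```ts
// Resolve SEGMENT_DOWNLOAD_TIMEOUT_MINS to milliseconds, falling back to
// the documented 10-minute default on missing or invalid values.
const DEFAULT_SEGMENT_TIMEOUT_MINS = 10

function getSegmentDownloadTimeoutMs(): number {
  const raw = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']
  const mins = raw === undefined ? DEFAULT_SEGMENT_TIMEOUT_MINS : Number(raw)
  const valid =
    Number.isFinite(mins) && mins > 0 ? mins : DEFAULT_SEGMENT_TIMEOUT_MINS
  return valid * 60 * 1000
}
```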


@ -1,9 +1,41 @@
# @actions/cache Releases
### 4.0.1
- Remove runtime dependency on `twirp-ts` [#1947](https://github.com/actions/toolkit/pull/1947)
- Cache miss as debug, not warning annotation [#1954](https://github.com/actions/toolkit/pull/1954)
### 4.0.0
#### Important changes
The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
The new service will gradually roll out as of **February 1st, 2025**, and the legacy service will be sunset on the same date. Changes in this release are **fully backward compatible**.
**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible, before **February 1st, 2025**.
If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
Upgrading to the recommended version should not break your workflows or require any changes beyond updating your `package.json` to version `4.0.0`.
Read more about the change and access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
#### Minor changes
- Update `@actions/core` to `1.11.0`
- Update `semver` `6.3.1`
- Add `twirp-ts` `2.5.0` to dependencies
### 3.3.0
- Update `@actions/core` to `1.11.1`
- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
### 3.2.4
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
### 3.2.3
- Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)


@ -1,4 +1,5 @@
import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient'
import {downloadCache} from '../src/internal/cacheHttpClient'
import {getCacheVersion} from '../src/internal/cacheUtils'
import {CompressionMethod} from '../src/internal/constants'
import * as downloadUtils from '../src/internal/downloadUtils'
import {DownloadOptions, getDownloadOptions} from '../src/options'


@ -42,23 +42,3 @@ test('resolvePaths works on github workspace directory', async () => {
const paths = await cacheUtils.resolvePaths([workspace])
expect(paths.length).toBeGreaterThan(0)
})
test('isGhes returns false for github.com', async () => {
process.env.GITHUB_SERVER_URL = 'https://github.com'
expect(cacheUtils.isGhes()).toBe(false)
})
test('isGhes returns false for ghe.com', async () => {
process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
expect(cacheUtils.isGhes()).toBe(false)
})
test('isGhes returns true for enterprise URL', async () => {
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
expect(cacheUtils.isGhes()).toBe(true)
})
test('isGhes returns false for ghe.localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
expect(cacheUtils.isGhes()).toBe(false)
})

packages/cache/__tests__/config.test.ts

@ -0,0 +1,25 @@
import * as config from '../src/internal/config'
beforeEach(() => {
jest.resetModules()
})
test('isGhes returns false for github.com', async () => {
process.env.GITHUB_SERVER_URL = 'https://github.com'
expect(config.isGhes()).toBe(false)
})
test('isGhes returns false for ghe.com', async () => {
process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
expect(config.isGhes()).toBe(false)
})
test('isGhes returns true for enterprise URL', async () => {
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
expect(config.isGhes()).toBe(true)
})
test('isGhes returns false for ghe.localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
expect(config.isGhes()).toBe(false)
})


@ -11,8 +11,6 @@ const downloadConcurrency = 8
const timeoutInMs = 30000
const segmentTimeoutInMs = 600000
const lookupOnly = false
const uploadConcurrency = 4
const uploadChunkSize = 32 * 1024 * 1024
test('getDownloadOptions sets defaults', async () => {
const actualOptions = getDownloadOptions()
@ -43,18 +41,21 @@ test('getDownloadOptions overrides all settings', async () => {
})
test('getUploadOptions sets defaults', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024,
useAzureSdk: false
}
const actualOptions = getUploadOptions()
expect(actualOptions).toEqual({
uploadConcurrency,
uploadChunkSize
})
expect(actualOptions).toEqual(expectedOptions)
})
test('getUploadOptions overrides all settings', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 2,
uploadChunkSize: 16 * 1024 * 1024
uploadChunkSize: 16 * 1024 * 1024,
useAzureSdk: true
}
const actualOptions = getUploadOptions(expectedOptions)
@ -62,6 +63,34 @@ test('getUploadOptions overrides all settings', async () => {
expect(actualOptions).toEqual(expectedOptions)
})
test('env variables override all getUploadOptions settings', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 16,
uploadChunkSize: 64 * 1024 * 1024,
useAzureSdk: true
}
process.env.CACHE_UPLOAD_CONCURRENCY = '16'
process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
const actualOptions = getUploadOptions(expectedOptions)
expect(actualOptions).toEqual(expectedOptions)
})
test('env variables override all getUploadOptions settings but do not exceed caps', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 32,
uploadChunkSize: 128 * 1024 * 1024,
useAzureSdk: true
}
process.env.CACHE_UPLOAD_CONCURRENCY = '64'
process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
const actualOptions = getUploadOptions(expectedOptions)
expect(actualOptions).toEqual(expectedOptions)
})
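The two tests above pin down a clamping behavior; a sketch of what that could look like (cap values are inferred from the expected options — 32 workers, 128 MiB chunks — not taken from the implementation):

```ts
// Env-provided values are honored only up to fixed caps.
const MAX_UPLOAD_CONCURRENCY = 32
const MAX_UPLOAD_CHUNK_SIZE = 128 * 1024 * 1024 // bytes

const clampUploadConcurrency = (requested: number): number =>
  Math.min(MAX_UPLOAD_CONCURRENCY, requested)
const clampUploadChunkSize = (requestedBytes: number): number =>
  Math.min(MAX_UPLOAD_CHUNK_SIZE, requestedBytes)

clampUploadConcurrency(64) // => 32
clampUploadChunkSize(256 * 1024 * 1024) // => 134217728 (128 MiB)
```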
test('getDownloadOptions overrides download timeout minutes', async () => {
const expectedOptions: DownloadOptions = {
useAzureSdk: false,


@ -0,0 +1,414 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as tar from '../src/internal/tar'
import * as config from '../src/internal/config'
import * as cacheUtils from '../src/internal/cacheUtils'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import {restoreCache} from '../src/cache'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
import {DownloadOptions} from '../src/options'
jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/config')
jest.mock('../src/internal/tar')
let logDebugMock: jest.SpyInstance
let logInfoMock: jest.SpyInstance
beforeAll(() => {
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
const actualUtils = jest.requireActual('../src/internal/cacheUtils')
return actualUtils.getCacheFileName(cm)
})
// Ensure that we're using v2 for these tests
jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
logDebugMock = jest.spyOn(core, 'debug')
logInfoMock = jest.spyOn(core, 'info')
})
afterEach(() => {
expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
})
test('restore with no path should fail', async () => {
const paths: string[] = []
const key = 'node-test'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('restore with too many keys should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = [...Array(20).keys()].map(x => x.toString())
await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
})
test('restore with large key should fail', async () => {
const paths = ['node_modules']
const key = 'foo'.repeat(512) // Over the 512 character limit
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
)
})
test('restore with invalid key should fail', async () => {
const paths = ['node_modules']
const key = 'comma,comma'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot contain commas.`
)
})
test('restore with no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
jest
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockReturnValue(
Promise.resolve({
ok: false,
signedDownloadUrl: '',
matchedKey: ''
})
)
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
})
test('restore with server error should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const logWarningMock = jest.spyOn(core, 'warning')
jest
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockImplementation(() => {
throw new Error('HTTP Error Occurred')
})
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
expect(logWarningMock).toHaveBeenCalledTimes(1)
expect(logWarningMock).toHaveBeenCalledWith(
'Failed to restore: HTTP Error Occurred'
)
})
test('restore with restore keys and no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = ['node-']
jest
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockReturnValue(
Promise.resolve({
ok: false,
signedDownloadUrl: '',
matchedKey: ''
})
)
const cacheKey = await restoreCache(paths, key, restoreKeys)
expect(cacheKey).toBe(undefined)
expect(logDebugMock).toHaveBeenCalledWith(
`Cache not found for keys: ${[key, ...restoreKeys].join(', ')}`
)
})
test('restore with gzip compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const compressionMethod = CompressionMethod.Gzip
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
const options = {useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: key
})
)
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Gzip)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const cacheKey = await restoreCache(paths, key, [], options)
expect(cacheKey).toBe(key)
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys: [],
version: cacheVersion
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
signedDownloadUrl,
archivePath,
options
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})
test('restore with zstd compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const compressionMethod = CompressionMethod.Zstd
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
const options = {useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: key
})
)
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Zstd)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 62915000
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const cacheKey = await restoreCache(paths, key, [], options)
expect(cacheKey).toBe(key)
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys: [],
version: cacheVersion
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
signedDownloadUrl,
archivePath,
options
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})
test('restore with cache found for restore key', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = ['node-']
const compressionMethod = CompressionMethod.Gzip
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
const options = {useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: restoreKeys[0]
})
)
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Gzip)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const cacheKey = await restoreCache(paths, key, restoreKeys, options)
expect(cacheKey).toBe(restoreKeys[0])
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys,
version: cacheVersion
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
signedDownloadUrl,
archivePath,
options
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})
test('restore with lookup only enabled', async () => {
const paths = ['node_modules']
const key = 'node-test'
const compressionMethod = CompressionMethod.Gzip
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
const options = {lookupOnly: true, useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: key
})
)
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const cacheKey = await restoreCache(paths, key, undefined, options)
expect(cacheKey).toBe(key)
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys: [],
version: cacheVersion
})
expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')
// creating a tempDir and downloading the cache are skipped
expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
expect(downloadCacheMock).toHaveBeenCalledTimes(0)
})


@ -3,6 +3,7 @@ import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import * as cacheUtils from '../src/internal/cacheUtils'
import * as config from '../src/internal/config'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as tar from '../src/internal/tar'
import {TypedResponse} from '@actions/http-client/lib/interfaces'
@ -14,6 +15,7 @@ import {HttpClientError} from '@actions/http-client'
jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/config')
jest.mock('../src/internal/tar')
beforeAll(() => {
@ -94,7 +96,7 @@ test('save with large cache outputs should fail in GHES with error message', asy
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true)
jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
@ -146,7 +148,7 @@ test('save with large cache outputs should fail in GHES without error message',
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true)
jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
@ -268,7 +270,12 @@ test('save with server error should fail', async () => {
compression
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
expect(saveCacheMock).toHaveBeenCalledWith(
cacheId,
archiveFile,
'',
undefined
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
@ -313,7 +320,12 @@ test('save with valid inputs uploads a cache', async () => {
compression
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
expect(saveCacheMock).toHaveBeenCalledWith(
cacheId,
archiveFile,
'',
undefined
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})


@ -0,0 +1,339 @@
import * as core from '@actions/core'
import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheUtils from '../src/internal/cacheUtils'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as config from '../src/internal/config'
import * as tar from '../src/internal/tar'
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import {UploadOptions} from '../src/options'
let logDebugMock: jest.SpyInstance
jest.mock('../src/internal/tar')
const uploadFileMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
uploadFile: uploadFileMock
}))
jest.mock('@azure/storage-blob', () => ({
BlobClient: jest.fn().mockImplementation(() => {
return {
getBlockBlobClient: blockBlobClientMock
}
})
}))
beforeAll(() => {
process.env['ACTIONS_RUNTIME_TOKEN'] = 'token'
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => {
return filePaths.map(x => path.resolve(x))
})
jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => {
return Promise.resolve('/foo/bar')
})
// Ensure that we're using v2 for these tests
jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
logDebugMock = jest.spyOn(core, 'debug')
})
afterEach(() => {
expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
jest.clearAllMocks()
})
test('save with missing input should fail', async () => {
const paths: string[] = []
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
await expect(saveCache(paths, key)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('save with large cache outputs should fail using v2 saveCache', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const createTarMock = jest.spyOn(tar, 'createTar')
const logWarningMock = jest.spyOn(core, 'warning')
const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(cacheSize)
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const cacheId = await saveCache([paths], key)
expect(cacheId).toBe(-1)
expect(logWarningMock).toHaveBeenCalledWith(
'Failed to save: Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.'
)
const archiveFolder = '/foo/bar'
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('create cache entry failure', async () => {
const paths = ['node_modules']
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const infoLogMock = jest.spyOn(core, 'info')
const createCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
const createTarMock = jest.spyOn(tar, 'createTar')
const finalizeCacheEntryMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'FinalizeCacheEntryUpload'
)
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const archiveFileSize = 1024
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const cacheId = await saveCache(paths, key)
expect(cacheId).toBe(-1)
expect(infoLogMock).toHaveBeenCalledWith(
`Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.`
)
expect(createCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion
})
expect(createTarMock).toHaveBeenCalledTimes(1)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0)
expect(saveCacheMock).toHaveBeenCalledTimes(0)
})
test('save cache fails if a signedUploadURL was not passed', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const signedUploadURL = ''
const archiveFileSize = 1024
const options: UploadOptions = {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
}
const createCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
)
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const cacheId = await saveCache([paths], key, options)
expect(cacheId).toBe(-1)
expect(createCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(saveCacheMock).toHaveBeenCalledWith(
-1,
archiveFile,
signedUploadURL,
options
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('finalize save cache failure', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const logWarningMock = jest.spyOn(core, 'warning')
const signedUploadURL = 'https://blob-storage.local?signed=true'
const archiveFileSize = 1024
const options: UploadOptions = {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
}
const createCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
)
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest
.spyOn(cacheHttpClient, 'saveCache')
.mockResolvedValue(Promise.resolve())
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const finalizeCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
.mockReturnValue(Promise.resolve({ok: false, entryId: ''}))
const cacheId = await saveCache([paths], key, options)
expect(createCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(saveCacheMock).toHaveBeenCalledWith(
-1,
archiveFile,
signedUploadURL,
options
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion,
sizeBytes: archiveFileSize.toString()
})
expect(cacheId).toBe(-1)
expect(logWarningMock).toHaveBeenCalledWith(
`Failed to save: Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
)
})
test('save with valid inputs uploads a cache', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const signedUploadURL = 'https://blob-storage.local?signed=true'
const createTarMock = jest.spyOn(tar, 'createTar')
const archiveFileSize = 1024
const options: UploadOptions = {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
}
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const cacheId = 4
jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
)
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
const finalizeCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
.mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()}))
const expectedCacheId = await saveCache([paths], key)
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(saveCacheMock).toHaveBeenCalledWith(
-1,
archiveFile,
signedUploadURL,
options
)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion,
sizeBytes: archiveFileSize.toString()
})
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(expectedCacheId).toBe(cacheId)
})
test('save with non existing path should not save cache using v2 saveCache', async () => {
const path = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async () => {
return []
})
await expect(saveCache([path], key)).rejects.toThrowError(
`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
)
})


@ -0,0 +1,58 @@
import * as uploadUtils from '../src/internal/uploadUtils'
import {TransferProgressEvent} from '@azure/ms-rest-js'
test('upload progress tracked correctly', () => {
const progress = new uploadUtils.UploadProgress(1000)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(0)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(0)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 0} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(0)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(0)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 250} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(250)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(250)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 500} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(500)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(500)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 750} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(750)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(750)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(1000)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(1000)
expect(progress.isDone()).toBe(true)
})
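For orientation, a sketch of how `UploadProgress` could be wired into an Azure upload (assumed wiring; only the `onProgress()` callback shape is taken from the test above):

```ts
import {BlockBlobClient} from '@azure/storage-blob'
import {UploadProgress} from '../src/internal/uploadUtils'

async function uploadWithProgress(
  client: BlockBlobClient,
  filePath: string,
  sizeBytes: number
): Promise<void> {
  const progress = new UploadProgress(sizeBytes)
  // uploadFile reports TransferProgressEvent updates to the tracker
  await client.uploadFile(filePath, {onProgress: progress.onProgress()})
}
```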

packages/cache/package-lock.json

@ -1,15 +1,15 @@
{
"name": "@actions/cache",
"version": "3.2.4",
"version": "4.0.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@actions/cache",
"version": "3.2.4",
"version": "4.0.1",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^2.1.1",
@ -17,30 +17,21 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"semver": "^6.3.1",
"uuid": "^3.3.3"
"@protobuf-ts/plugin": "^2.9.4",
"semver": "^6.3.1"
},
"devDependencies": {
"@types/semver": "^6.0.0",
"@types/uuid": "^3.4.5",
"typescript": "^5.2.2"
}
},
"node_modules/@actions/core": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
"integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
"dependencies": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
}
},
"node_modules/@actions/core/node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
"@actions/exec": "^1.1.1",
"@actions/http-client": "^2.0.1"
}
},
"node_modules/@actions/exec": {
@ -255,6 +246,83 @@
"node": ">=8.0.0"
}
},
"node_modules/@protobuf-ts/plugin": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz",
"integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==",
"license": "Apache-2.0",
"dependencies": {
"@protobuf-ts/plugin-framework": "^2.9.4",
"@protobuf-ts/protoc": "^2.9.4",
"@protobuf-ts/runtime": "^2.9.4",
"@protobuf-ts/runtime-rpc": "^2.9.4",
"typescript": "^3.9"
},
"bin": {
"protoc-gen-dump": "bin/protoc-gen-dump",
"protoc-gen-ts": "bin/protoc-gen-ts"
}
},
"node_modules/@protobuf-ts/plugin-framework": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz",
"integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==",
"license": "(Apache-2.0 AND BSD-3-Clause)",
"dependencies": {
"@protobuf-ts/runtime": "^2.9.4",
"typescript": "^3.9"
}
},
"node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==",
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=4.2.0"
}
},
"node_modules/@protobuf-ts/plugin/node_modules/typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==",
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=4.2.0"
}
},
"node_modules/@protobuf-ts/protoc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz",
"integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==",
"license": "Apache-2.0",
"bin": {
"protoc": "protoc.js"
}
},
"node_modules/@protobuf-ts/runtime": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz",
"integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==",
"license": "(Apache-2.0 AND BSD-3-Clause)"
},
"node_modules/@protobuf-ts/runtime-rpc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz",
"integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==",
"license": "Apache-2.0",
"dependencies": {
"@protobuf-ts/runtime": "^2.9.4"
}
},
"node_modules/@types/node": {
"version": "20.4.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz",
@ -296,12 +364,6 @@
"@types/node": "*"
}
},
"node_modules/@types/uuid": {
"version": "3.4.10",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.10.tgz",
"integrity": "sha512-BgeaZuElf7DEYZhWYDTc/XcLZXdVgFkVSTa13BqKvbnmUrxr3TJFKofUxCtDO9UQOdhnV+HPOESdHiHKZOJV1A==",
"dev": true
},
"node_modules/abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
@ -486,15 +548,6 @@
"node": ">=14.17"
}
},
"node_modules/uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
"deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
"bin": {
"uuid": "bin/uuid"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
@ -532,19 +585,12 @@
},
"dependencies": {
"@actions/core": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
"integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
"requires": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
},
"dependencies": {
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
}
"@actions/exec": "^1.1.1",
"@actions/http-client": "^2.0.1"
}
},
"@actions/exec": {
@ -724,6 +770,59 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz",
"integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA=="
},
"@protobuf-ts/plugin": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz",
"integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==",
"requires": {
"@protobuf-ts/plugin-framework": "^2.9.4",
"@protobuf-ts/protoc": "^2.9.4",
"@protobuf-ts/runtime": "^2.9.4",
"@protobuf-ts/runtime-rpc": "^2.9.4",
"typescript": "^3.9"
},
"dependencies": {
"typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q=="
}
}
},
"@protobuf-ts/plugin-framework": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz",
"integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==",
"requires": {
"@protobuf-ts/runtime": "^2.9.4",
"typescript": "^3.9"
},
"dependencies": {
"typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q=="
}
}
},
"@protobuf-ts/protoc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz",
"integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ=="
},
"@protobuf-ts/runtime": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz",
"integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg=="
},
"@protobuf-ts/runtime-rpc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz",
"integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==",
"requires": {
"@protobuf-ts/runtime": "^2.9.4"
}
},
"@types/node": {
"version": "20.4.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz",
@ -764,12 +863,6 @@
"@types/node": "*"
}
},
"@types/uuid": {
"version": "3.4.10",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.10.tgz",
"integrity": "sha512-BgeaZuElf7DEYZhWYDTc/XcLZXdVgFkVSTa13BqKvbnmUrxr3TJFKofUxCtDO9UQOdhnV+HPOESdHiHKZOJV1A==",
"dev": true
},
"abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
@ -900,11 +993,6 @@
"integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==",
"dev": true
},
"uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
},
"webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",


@ -1,6 +1,6 @@
{
"name": "@actions/cache",
"version": "3.2.4",
"version": "4.0.1",
"preview": true,
"description": "Actions cache lib",
"keywords": [
@ -37,7 +37,7 @@
"url": "https://github.com/actions/toolkit/issues"
},
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^2.1.1",
@ -45,12 +45,11 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"semver": "^6.3.1",
"uuid": "^3.3.3"
"@protobuf-ts/plugin": "^2.9.4",
"semver": "^6.3.1"
},
"devDependencies": {
"@types/semver": "^6.0.0",
"@types/uuid": "^3.4.5",
"typescript": "^5.2.2"
}
}


@ -2,8 +2,17 @@ import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar, listTar} from './internal/tar'
import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
import {getCacheServiceVersion, isGhes} from './internal/config'
import {DownloadOptions, ExtraTarOptions, UploadOptions} from './options'
import {createTar, extractTar, listTar} from './internal/tar'
import {
CreateCacheEntryRequest,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse,
GetCacheEntryDownloadURLRequest
} from './generated/results/api/v1/cache'
import {CacheFileSizeLimit} from './internal/constants'
export class ValidationError extends Error {
constructor(message: string) {
@ -48,7 +57,6 @@ function checkKey(key: string): void {
*
* @returns boolean return true if Actions cache service feature is available, otherwise false
*/
export function isFeatureAvailable(): boolean {
return !!process.env['ACTIONS_CACHE_URL']
}
@ -57,8 +65,8 @@ export function isFeatureAvailable(): boolean {
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
@ -71,8 +79,49 @@ export async function restoreCache(
enableCrossOsArchive = false,
extraTarOptions: ExtraTarOptions = []
): Promise<string | undefined> {
const cacheServiceVersion: string = getCacheServiceVersion()
core.debug(`Cache service version: ${cacheServiceVersion}`)
checkPaths(paths)
switch (cacheServiceVersion) {
case 'v2':
return await restoreCacheV2(
paths,
primaryKey,
restoreKeys,
options,
enableCrossOsArchive
)
case 'v1':
default:
return await restoreCacheV1(
paths,
primaryKey,
restoreKeys,
options,
enableCrossOsArchive
)
}
}
/**
* Restores cache using the legacy Cache Service
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
async function restoreCacheV1(
paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]
@ -154,6 +203,121 @@ export async function restoreCache(
return undefined
}
/**
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
async function restoreCacheV2(
paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
// Override DownloadOptions to force the use of Azure
options = {
...options,
useAzureSdk: true
}
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]
core.debug('Resolved Keys:')
core.debug(JSON.stringify(keys))
if (keys.length > 10) {
throw new ValidationError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
}
for (const key of keys) {
checkKey(key)
}
let archivePath = ''
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
const compressionMethod = await utils.getCompressionMethod()
const request: GetCacheEntryDownloadURLRequest = {
key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive
)
}
const response = await twirpClient.GetCacheEntryDownloadURL(request)
if (!response.ok) {
core.debug(`Cache not found for keys: ${keys.join(', ')}`)
return undefined
}
core.info(`Cache hit for: ${request.key}`)
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return response.matchedKey
}
archivePath = path.join(
await utils.createTempDirectory(),
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive path: ${archivePath}`)
core.debug(`Starting download of archive to: ${archivePath}`)
await cacheHttpClient.downloadCache(
response.signedDownloadUrl,
archivePath,
options
)
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
await extractTar(archivePath, compressionMethod)
core.info('Cache restored successfully')
return response.matchedKey
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
throw error
} else {
// Suppress all non-validation, cache-related errors because caching should be optional
core.warning(`Failed to restore: ${(error as Error).message}`)
}
} finally {
try {
if (archivePath) {
await utils.unlinkFile(archivePath)
}
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
return undefined
}
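A minimal usage sketch of the public API above (paths, keys, and options are example values):

```ts
import {restoreCache, saveCache} from '@actions/cache'

async function example(): Promise<void> {
  const hitKey = await restoreCache(['node_modules'], 'node-test', ['node-'], {
    lookupOnly: true // probe for a hit without downloading the archive
  })
  if (hitKey === undefined) {
    // cache miss: do the work, then persist it for the next run
    const cacheId = await saveCache(['node_modules'], 'node-test')
    console.log(`Saved cache with id ${cacheId}`)
  }
}
```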
/**
* Saves a list of files with the specified key
*
@ -170,9 +334,34 @@ export async function saveCache(
enableCrossOsArchive = false,
extraTarOptions: ExtraTarOptions = []
): Promise<number> {
const cacheServiceVersion: string = getCacheServiceVersion()
core.debug(`Cache service version: ${cacheServiceVersion}`)
checkPaths(paths)
checkKey(key)
switch (cacheServiceVersion) {
case 'v2':
return await saveCacheV2(paths, key, options, enableCrossOsArchive)
case 'v1':
default:
return await saveCacheV1(paths, key, options, enableCrossOsArchive)
}
}
/**
* Save cache using the legacy Cache Service
*
* @param paths a list of file paths to save to the cache
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save a cache on Windows which could be restored on any platform
* @returns number returns cacheId if the cache was saved successfully, otherwise returns -1
*/
async function saveCacheV1(
paths: string[],
key: string,
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
const compressionMethod = await utils.getCompressionMethod()
let cacheId = -1
@ -209,7 +398,7 @@ export async function saveCache(
core.debug(`File Size: ${archiveFileSize}`)
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
if (archiveFileSize > fileSizeLimit && !isGhes()) {
throw new Error(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
@ -244,7 +433,138 @@ export async function saveCache(
}
core.debug(`Saving Cache (ID: ${cacheId})`)
await cacheHttpClient.saveCache(cacheId, archivePath, options)
await cacheHttpClient.saveCache(cacheId, archivePath, '', options)
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
throw error
} else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`)
} else {
core.warning(`Failed to save: ${typedError.message}`)
}
} finally {
// Try to delete the archive to save space
try {
await utils.unlinkFile(archivePath)
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
return cacheId
}
/**
* Save cache using Cache Service v2
*
* @param paths a list of file paths to save to the cache
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save a cache on Windows which could be restored on any platform
* @returns number returns cacheId if the cache was saved successfully, otherwise returns -1
*/
async function saveCacheV2(
paths: string[],
key: string,
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
// Override UploadOptions to force the use of Azure
// ...options goes first because we want to override the default values
// set in UploadOptions with these specific figures
options = {
...options,
uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
uploadConcurrency: 8, // 8 workers for parallel upload
useAzureSdk: true
}
const compressionMethod = await utils.getCompressionMethod()
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
let cacheId = -1
const cachePaths = await utils.resolvePaths(paths)
core.debug('Cache Paths:')
core.debug(`${JSON.stringify(cachePaths)}`)
if (cachePaths.length === 0) {
throw new Error(
`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
)
}
const archiveFolder = await utils.createTempDirectory()
const archivePath = path.join(
archiveFolder,
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive Path: ${archivePath}`)
try {
await createTar(archiveFolder, cachePaths, compressionMethod)
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.debug(`File Size: ${archiveFileSize}`)
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > CacheFileSizeLimit && !isGhes()) {
throw new Error(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`
)
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize
core.debug('Reserving Cache')
const version = utils.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive
)
const request: CreateCacheEntryRequest = {
key,
version
}
const response = await twirpClient.CreateCacheEntry(request)
if (!response.ok) {
throw new ReserveCacheError(
`Unable to reserve cache with key ${key}, another job may be creating this cache.`
)
}
core.debug(`Attempting to upload cache located at: ${archivePath}`)
await cacheHttpClient.saveCache(
cacheId,
archivePath,
response.signedUploadUrl,
options
)
const finalizeRequest: FinalizeCacheEntryUploadRequest = {
key,
version,
sizeBytes: `${archiveFileSize}`
}
const finalizeResponse: FinalizeCacheEntryUploadResponse =
await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`)
if (!finalizeResponse.ok) {
throw new Error(
`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
)
}
cacheId = parseInt(finalizeResponse.entryId)
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
throw error
} else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`)
} else {
core.warning(`Failed to save: ${typedError.message}`)
}
} finally {
// Try to delete the archive to save space
try {
await utils.unlinkFile(archivePath)
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
return cacheId
}
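// Illustrative usage sketch (not part of the diff): how a caller might
// invoke the v2 save path documented above. The paths, key, and chunk
// size are placeholder values; per the contract above, -1 means the
// cache could not be saved, and ValidationError is rethrown.
async function saveNodeModulesExample(): Promise<void> {
  const cacheId = await saveCacheV2(
    ['node_modules', '.cache'],
    `npm-${process.platform}-abc123`,
    {uploadChunkSize: 32 * 1024 * 1024},
    false
  )
  if (cacheId === -1) {
    core.warning('Cache was not saved')
  }
}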
@ -0,0 +1,290 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { typeofJsonValue } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { PbLong } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone or local
* calendar, encoded as a count of seconds and fractions of seconds at
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
* January 1, 1970, in the proleptic Gregorian calendar which extends the
* Gregorian calendar backwards to year one.
*
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
* second table is needed for interpretation, using a [24-hour linear
* smear](https://developers.google.com/time/smear).
*
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
* restricting to that range, we ensure that we can convert to and from [RFC
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from Java `Instant.now()`.
*
* Instant now = Instant.now();
*
* Timestamp timestamp =
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
* .setNanos(now.getNano()).build();
*
*
* Example 6: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends MessageType<Timestamp> {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp {
const msg = this.create();
const ms = Date.now();
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date {
return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp {
const msg = this.create();
const ms = date.getTime();
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue {
let ms = PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value?: PartialMessage<Timestamp>): Timestamp {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Timestamp>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export const Timestamp = new Timestamp$Type();
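// Hedged usage sketch (not part of the generated file): exercising the
// convenience helpers defined on Timestamp$Type above. All names used
// here are declared in this file; no imports are assumed.
const ts = Timestamp.now();           // seconds as a decimal string, nanos as a number
const asDate = Timestamp.toDate(ts);  // convert to a JavaScript Date
const roundTrip = Timestamp.fromDate(asDate);
const rfc3339 = Timestamp.toJson(ts); // e.g. "2017-01-15T01:30:15.010Z"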


@ -0,0 +1,753 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.
//
import { ScalarType } from "@protobuf-ts/runtime";
import { LongType } from "@protobuf-ts/runtime";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends MessageType<DoubleValue> {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<DoubleValue>): DoubleValue {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<DoubleValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export const DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends MessageType<FloatValue> {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<FloatValue>): FloatValue {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FloatValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export const FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends MessageType<Int64Value> {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
return target;
}
create(value?: PartialMessage<Int64Value>): Int64Value {
const message = { value: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Int64Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export const Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends MessageType<UInt64Value> {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
return target;
}
create(value?: PartialMessage<UInt64Value>): UInt64Value {
const message = { value: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<UInt64Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint64 value */ 1:
message.value = reader.uint64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* uint64 value = 1; */
if (message.value !== "0")
writer.tag(1, WireType.Varint).uint64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export const UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends MessageType<Int32Value> {
constructor() {
super("google.protobuf.Int32Value", [
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Encode `Int32Value` to JSON string.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
}
/**
* Decode `Int32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<Int32Value>): Int32Value {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Int32Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int32 value */ 1:
message.value = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int32 value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Varint).int32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export const Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends MessageType<UInt32Value> {
constructor() {
super("google.protobuf.UInt32Value", [
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
]);
}
/**
* Encode `UInt32Value` to JSON string.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
}
/**
* Decode `UInt32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<UInt32Value>): UInt32Value {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<UInt32Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint32 value */ 1:
message.value = reader.uint32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* uint32 value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Varint).uint32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export const UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends MessageType<BoolValue> {
constructor() {
super("google.protobuf.BoolValue", [
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue {
return message.value;
}
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean;
return target;
}
create(value?: PartialMessage<BoolValue>): BoolValue {
const message = { value: false };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<BoolValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool value */ 1:
message.value = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool value = 1; */
if (message.value !== false)
writer.tag(1, WireType.Varint).bool(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export const BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends MessageType<StringValue> {
constructor() {
super("google.protobuf.StringValue", [
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue {
return message.value;
}
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string;
return target;
}
create(value?: PartialMessage<StringValue>): StringValue {
const message = { value: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<StringValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string value */ 1:
message.value = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string value = 1; */
if (message.value !== "")
writer.tag(1, WireType.LengthDelimited).string(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export const StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends MessageType<BytesValue> {
constructor() {
super("google.protobuf.BytesValue", [
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
}
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array;
return target;
}
create(value?: PartialMessage<BytesValue>): BytesValue {
const message = { value: new Uint8Array(0) };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<BytesValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bytes value */ 1:
message.value = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bytes value = 1; */
if (message.value.length)
writer.tag(1, WireType.LengthDelimited).bytes(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export const BytesValue = new BytesValue$Type();
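// Hedged usage sketch (not part of the generated file): wrapper messages
// exist to distinguish an unset field from a scalar default. The values
// below are placeholders; all types are declared in this file.
const count = Int64Value.create({ value: "42" });  // int64 is carried as a string
const wire = Int64Value.toBinary(count);           // binary protobuf encoding
const greeting = StringValue.fromJson("hello");    // JSON form is the bare string
const flag = BoolValue.create();                   // defaults to { value: false }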


@ -0,0 +1,521 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export interface CreateCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 3;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export interface CreateCacheEntryResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to upload the cache archive
*
* @generated from protobuf field: string signed_upload_url = 2;
*/
signedUploadUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export interface FinalizeCacheEntryUploadRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Size of the cache archive in Bytes
*
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export interface FinalizeCacheEntryUploadResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* Cache entry database ID
*
* @generated from protobuf field: int64 entry_id = 2;
*/
entryId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export interface GetCacheEntryDownloadURLRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export interface GetCacheEntryDownloadURLResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to download the cache archive
*
* @generated from protobuf field: string signed_download_url = 2;
*/
signedDownloadUrl: string;
/**
* Key or restore key that matches the lookup
*
* @generated from protobuf field: string matched_key = 3;
*/
matchedKey: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest {
const message = { key: "", version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CreateCacheEntryRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* string version */ 3:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* string version = 3; */
if (message.version !== "")
writer.tag(3, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export const CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CreateCacheEntryResponse>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest {
const message = { key: "", sizeBytes: "0", version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FinalizeCacheEntryUploadRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(3, WireType.Varint).int64(message.sizeBytes);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export const FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse {
const message = { ok: false, entryId: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FinalizeCacheEntryUploadResponse>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, WireType.Varint).bool(message.ok);
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, WireType.Varint).int64(message.entryId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export const FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest {
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<GetCacheEntryDownloadURLRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export const GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse {
const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<GetCacheEntryDownloadURLResponse>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_download_url */ 2:
message.signedDownloadUrl = reader.string();
break;
case /* string matched_key */ 3:
message.matchedKey = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, WireType.Varint).bool(message.ok);
/* string signed_download_url = 2; */
if (message.signedDownloadUrl !== "")
writer.tag(2, WireType.LengthDelimited).string(message.signedDownloadUrl);
/* string matched_key = 3; */
if (message.matchedKey !== "")
writer.tag(3, WireType.LengthDelimited).string(message.matchedKey);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
export const CacheService = new ServiceType("github.actions.results.api.v1.CacheService", [
{ name: "CreateCacheEntry", options: {}, I: CreateCacheEntryRequest, O: CreateCacheEntryResponse },
{ name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse },
{ name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse }
]);
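
The generated `MessageType` singletons double as codecs: `toBinary` and `fromBinary` (inherited from `MessageType`) delegate to the `internalBinaryWrite`/`internalBinaryRead` methods above. A minimal round-trip sketch, with made-up key and version values:

```typescript
// Round-trip sketch; the field values are placeholders.
import {GetCacheEntryDownloadURLRequest} from './cache'

const request = GetCacheEntryDownloadURLRequest.create({
  key: 'node-deps-linux-x64',
  restoreKeys: ['node-deps-linux-', 'node-deps-'],
  version: '0123abcd'
})

const bytes = GetCacheEntryDownloadURLRequest.toBinary(request)
const decoded = GetCacheEntryDownloadURLRequest.fromBinary(bytes)
console.assert(decoded.key === request.key) // fields survive the round trip
```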

View File

@ -0,0 +1,157 @@
import {
CreateCacheEntryRequest,
CreateCacheEntryResponse,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse,
} from "./cache";
//==================================//
// Client Code //
//==================================//
interface Rpc {
request(
service: string,
method: string,
contentType: "application/json" | "application/protobuf",
data: object | Uint8Array
): Promise<object | Uint8Array>;
}
export interface CacheServiceClient {
CreateCacheEntry(
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse>;
}
export class CacheServiceClientJSON implements CacheServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateCacheEntry.bind(this);
this.FinalizeCacheEntryUpload.bind(this);
this.GetCacheEntryDownloadURL.bind(this);
}
CreateCacheEntry(
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse> {
const data = CreateCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"CreateCacheEntry",
"application/json",
data as object
);
return promise.then((data) =>
CreateCacheEntryResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
FinalizeCacheEntryUpload(
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse> {
const data = FinalizeCacheEntryUploadRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"FinalizeCacheEntryUpload",
"application/json",
data as object
);
return promise.then((data) =>
FinalizeCacheEntryUploadResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
GetCacheEntryDownloadURL(
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse> {
const data = GetCacheEntryDownloadURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"GetCacheEntryDownloadURL",
"application/json",
data as object
);
return promise.then((data) =>
GetCacheEntryDownloadURLResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
}
export class CacheServiceClientProtobuf implements CacheServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateCacheEntry.bind(this);
this.FinalizeCacheEntryUpload.bind(this);
this.GetCacheEntryDownloadURL.bind(this);
}
CreateCacheEntry(
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse> {
const data = CreateCacheEntryRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"CreateCacheEntry",
"application/protobuf",
data
);
return promise.then((data) =>
CreateCacheEntryResponse.fromBinary(data as Uint8Array)
);
}
FinalizeCacheEntryUpload(
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse> {
const data = FinalizeCacheEntryUploadRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"FinalizeCacheEntryUpload",
"application/protobuf",
data
);
return promise.then((data) =>
FinalizeCacheEntryUploadResponse.fromBinary(data as Uint8Array)
);
}
GetCacheEntryDownloadURL(
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse> {
const data = GetCacheEntryDownloadURLRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"GetCacheEntryDownloadURL",
"application/protobuf",
data
);
return promise.then((data) =>
GetCacheEntryDownloadURLResponse.fromBinary(data as Uint8Array)
);
}
}
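
Any object satisfying the `Rpc` interface can back these clients. Below is a hypothetical `fetch`-based transport for the JSON client, assuming Node 18+ global `fetch`; the package's real transport is the retrying `@actions/http-client` wrapper shown later in this diff.

```typescript
// Hypothetical JSON-only transport; error handling and auth omitted.
import {CacheServiceClientJSON} from './cache.twirp-client'

const baseUrl = process.env['ACTIONS_RESULTS_URL'] ?? ''

const rpc = {
  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const url = new URL(`/twirp/${service}/${method}`, baseUrl).href
    const response = await fetch(url, {
      method: 'POST',
      headers: {'Content-Type': contentType},
      body: JSON.stringify(data) // the JSON client always passes an object
    })
    return (await response.json()) as object
  }
}

const client = new CacheServiceClientJSON(rpc)
```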

View File

@ -0,0 +1,85 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cachemetadata.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheScope } from "./cachescope";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export interface CacheMetadata {
/**
* Backend repository id
*
* @generated from protobuf field: int64 repository_id = 1;
*/
repositoryId: string;
/**
* Scopes for the cache entry
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
*/
scope: CacheScope[];
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheMetadata$Type extends MessageType<CacheMetadata> {
constructor() {
super("github.actions.results.entities.v1.CacheMetadata", [
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheScope }
]);
}
create(value?: PartialMessage<CacheMetadata>): CacheMetadata {
const message = { repositoryId: "0", scope: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CacheMetadata>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 repository_id */ 1:
message.repositoryId = reader.int64().toString();
break;
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
message.scope.push(CacheScope.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 repository_id = 1; */
if (message.repositoryId !== "0")
writer.tag(1, WireType.Varint).int64(message.repositoryId);
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
for (let i = 0; i < message.scope.length; i++)
CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export const CacheMetadata = new CacheMetadata$Type();

View File

@ -0,0 +1,84 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cachescope.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheScope
*/
export interface CacheScope {
/**
* Determines the scope of the cache entry
*
* @generated from protobuf field: string scope = 1;
*/
scope: string;
/**
* None: 0 | Read: 1 | Write: 2 | All: (1|2)
*
* @generated from protobuf field: int64 permission = 2;
*/
permission: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheScope$Type extends MessageType<CacheScope> {
constructor() {
super("github.actions.results.entities.v1.CacheScope", [
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value?: PartialMessage<CacheScope>): CacheScope {
const message = { scope: "", permission: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CacheScope>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string scope */ 1:
message.scope = reader.string();
break;
case /* int64 permission */ 2:
message.permission = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string scope = 1; */
if (message.scope !== "")
writer.tag(1, WireType.LengthDelimited).string(message.scope);
/* int64 permission = 2; */
if (message.permission !== "0")
writer.tag(2, WireType.Varint).int64(message.permission);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
export const CacheScope = new CacheScope$Type();
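
Note that `permission` is an `int64` surfaced as a TypeScript `string` (the file was generated with `long_type_string`), so the bitmask described in the field comment is built numerically and then stringified. A small sketch with an invented scope name:

```typescript
import {CacheScope} from './cachescope'

// Permission bits per the field comment: Read = 1, Write = 2, All = 3.
const READ = 1
const WRITE = 2

const scope = CacheScope.create({
  scope: 'refs/heads/main', // invented example scope
  permission: String(READ | WRITE) // the int64 travels as the string "3"
})
```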

View File

@ -5,12 +5,10 @@ import {
RequestOptions,
TypedResponse
} from '@actions/http-client/lib/interfaces'
import * as crypto from 'crypto'
import * as fs from 'fs'
import {URL} from 'url'
import * as utils from './cacheUtils'
import {CompressionMethod} from './constants'
import {uploadCacheArchiveSDK} from './uploadUtils'
import {
ArtifactCacheEntry,
InternalCacheOptions,
@ -36,11 +34,11 @@ import {
retryHttpClientResponse,
retryTypedResponse
} from './requestUtils'
const versionSalt = '1.0'
import {getCacheServiceURL} from './config'
import {getUserAgentString} from './shared/user-agent'
function getCacheApiUrl(resource: string): string {
const baseUrl: string = process.env['ACTIONS_CACHE_URL'] || ''
const baseUrl: string = getCacheServiceURL()
if (!baseUrl) {
throw new Error('Cache Service Url not found, unable to restore cache.')
}
@ -69,48 +67,24 @@ function createHttpClient(): HttpClient {
const bearerCredentialHandler = new BearerCredentialHandler(token)
return new HttpClient(
'actions/cache',
getUserAgentString(),
[bearerCredentialHandler],
getRequestOptions()
)
}
export function getCacheVersion(
paths: string[],
compressionMethod?: CompressionMethod,
enableCrossOsArchive = false
): string {
// don't pass changes upstream
const components = paths.slice()
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod)
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only')
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt)
return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}
export async function getCacheEntry(
keys: string[],
paths: string[],
options?: InternalCacheOptions
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient()
const version = getCacheVersion(
const version = utils.getCacheVersion(
paths,
options?.compressionMethod,
options?.enableCrossOsArchive
)
const resource = `cache?keys=${encodeURIComponent(
keys.join(',')
)}&version=${version}`
@ -207,7 +181,7 @@ export async function reserveCache(
options?: InternalCacheOptions
): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
const httpClient = createHttpClient()
const version = getCacheVersion(
const version = utils.getCacheVersion(
paths,
options?.compressionMethod,
options?.enableCrossOsArchive
@ -353,26 +327,45 @@ async function commitCache(
export async function saveCache(
cacheId: number,
archivePath: string,
signedUploadURL?: string,
options?: UploadOptions
): Promise<void> {
const httpClient = createHttpClient()
const uploadOptions = getUploadOptions(options)
core.debug('Upload cache')
await uploadFile(httpClient, cacheId, archivePath, options)
if (uploadOptions.useAzureSdk) {
// Use Azure storage SDK to upload caches directly to Azure
if (!signedUploadURL) {
throw new Error(
'Azure Storage SDK can only be used when a signed URL is provided.'
)
}
await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
} else {
const httpClient = createHttpClient()
// Commit Cache
core.debug('Committing cache')
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
)
core.debug('Upload cache')
await uploadFile(httpClient, cacheId, archivePath, options)
const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
// Commit Cache
core.debug('Committing cache')
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
cacheSize / (1024 * 1024)
)} MB (${cacheSize} B)`
)
}
core.info('Cache saved successfully')
const commitCacheResponse = await commitCache(
httpClient,
cacheId,
cacheSize
)
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
)
}
core.info('Cache saved successfully')
}
}

View File

@ -2,17 +2,19 @@ import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as glob from '@actions/glob'
import * as io from '@actions/io'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as path from 'path'
import * as semver from 'semver'
import * as util from 'util'
import {v4 as uuidV4} from 'uuid'
import {
CacheFilename,
CompressionMethod,
GnuTarPathOnWindows
} from './constants'
const versionSalt = '1.0'
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
export async function createTempDirectory(): Promise<string> {
const IS_WINDOWS = process.platform === 'win32'
@ -34,7 +36,7 @@ export async function createTempDirectory(): Promise<string> {
tempDirectory = path.join(baseLocation, 'actions', 'temp')
}
const dest = path.join(tempDirectory, uuidV4())
const dest = path.join(tempDirectory, crypto.randomUUID())
await io.mkdirP(dest)
return dest
}
@ -131,15 +133,35 @@ export function assertDefined<T>(name: string, value?: T): T {
return value
}
export function isGhes(): boolean {
const ghUrl = new URL(
process.env['GITHUB_SERVER_URL'] || 'https://github.com'
)
export function getCacheVersion(
paths: string[],
compressionMethod?: CompressionMethod,
enableCrossOsArchive = false
): string {
// don't pass changes upstream
const components = paths.slice()
const hostname = ghUrl.hostname.trimEnd().toUpperCase()
const isGitHubHost = hostname === 'GITHUB.COM'
const isGheHost =
hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod)
}
return !isGitHubHost && !isGheHost
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only')
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt)
return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}
export function getRuntimeToken(): string {
const token = process.env['ACTIONS_RUNTIME_TOKEN']
if (!token) {
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
}
return token
}
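
For reference, `getCacheVersion` reduces to a SHA-256 over pipe-joined components. An equivalent standalone computation for a single path with zstd compression on Linux (cross-OS archives disabled; assumes `CompressionMethod.Zstd === 'zstd'` as defined in `constants.ts`):

```typescript
import * as crypto from 'crypto'

// Components: paths, then the compression method, then versionSalt ('1.0').
const components = ['node_modules', 'zstd', '1.0']
const version = crypto
  .createHash('sha256')
  .update(components.join('|'))
  .digest('hex')
console.log(version) // stable for as long as the inputs are stable
```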

packages/cache/src/internal/config.ts (new file, 39 lines)
View File

@ -0,0 +1,39 @@
export function isGhes(): boolean {
const ghUrl = new URL(
process.env['GITHUB_SERVER_URL'] || 'https://github.com'
)
const hostname = ghUrl.hostname.trimEnd().toUpperCase()
const isGitHubHost = hostname === 'GITHUB.COM'
const isGheHost = hostname.endsWith('.GHE.COM')
const isLocalHost = hostname.endsWith('.LOCALHOST')
return !isGitHubHost && !isGheHost && !isLocalHost
}
export function getCacheServiceVersion(): string {
// Cache service v2 is not supported on GHES. We will default to
// cache service v1 even if the feature flag was enabled by the user.
if (isGhes()) return 'v1'
return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'
}
export function getCacheServiceURL(): string {
const version = getCacheServiceVersion()
// Based on the version of the cache service, we will determine which
// URL to use.
switch (version) {
case 'v1':
return (
process.env['ACTIONS_CACHE_URL'] ||
process.env['ACTIONS_RESULTS_URL'] ||
''
)
case 'v2':
return process.env['ACTIONS_RESULTS_URL'] || ''
default:
throw new Error(`Unsupported cache service version: ${version}`)
}
}
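
Taken together, the two helpers resolve the endpoint like this (env values invented; on GHES, `getCacheServiceVersion` always reports `v1`):

```typescript
import {getCacheServiceVersion, getCacheServiceURL} from './config'

process.env['GITHUB_SERVER_URL'] = 'https://github.com'
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
process.env['ACTIONS_RESULTS_URL'] = 'https://results.example.com/'

console.log(getCacheServiceVersion()) // 'v2'
console.log(getCacheServiceURL()) // 'https://results.example.com/'
```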

View File

@ -36,3 +36,5 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S
export const TarFilename = 'cache.tar'
export const ManifestFilename = 'manifest.txt'
export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository

View File

@ -0,0 +1,204 @@
import {info, debug} from '@actions/core'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'
import {getCacheServiceURL} from '../config'
import {getRuntimeToken} from '../cacheUtils'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp-client'
// The twirp http client must implement this interface
interface Rpc {
request(
service: string,
method: string,
contentType: 'application/json' | 'application/protobuf',
data: object | Uint8Array
): Promise<object | Uint8Array>
}
/**
* This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
*
* It adds retry logic to the request method, which is not present in the generated client.
*
* This class is used to interact with cache service v2.
*/
class CacheServiceClient implements Rpc {
private httpClient: HttpClient
private baseUrl: string
private maxAttempts = 5
private baseRetryIntervalMilliseconds = 3000
private retryMultiplier = 1.5
constructor(
userAgent: string,
maxAttempts?: number,
baseRetryIntervalMilliseconds?: number,
retryMultiplier?: number
) {
const token = getRuntimeToken()
this.baseUrl = getCacheServiceURL()
if (maxAttempts) {
this.maxAttempts = maxAttempts
}
if (baseRetryIntervalMilliseconds) {
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
}
if (retryMultiplier) {
this.retryMultiplier = retryMultiplier
}
this.httpClient = new HttpClient(userAgent, [
new BearerCredentialHandler(token)
])
}
// This function satisfies the Rpc interface. It is compatible with the
// JSON generated client.
async request(
service: string,
method: string,
contentType: 'application/json' | 'application/protobuf',
data: object | Uint8Array
): Promise<object | Uint8Array> {
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
debug(`[Request] ${method} ${url}`)
const headers = {
'Content-Type': contentType
}
try {
const {body} = await this.retryableRequest(async () =>
this.httpClient.post(url, JSON.stringify(data), headers)
)
return body
} catch (error) {
throw new Error(`Failed to ${method}: ${error.message}`)
}
}
async retryableRequest(
operation: () => Promise<HttpClientResponse>
): Promise<{response: HttpClientResponse; body: object}> {
let attempt = 0
let errorMessage = ''
let rawBody = ''
while (attempt < this.maxAttempts) {
let isRetryable = false
try {
const response = await operation()
const statusCode = response.message.statusCode
rawBody = await response.readBody()
debug(`[Response] - ${response.message.statusCode}`)
debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
const body = JSON.parse(rawBody)
debug(`Body: ${JSON.stringify(body, null, 2)}`)
if (this.isSuccessStatusCode(statusCode)) {
return {response, body}
}
isRetryable = this.isRetryableHttpStatusCode(statusCode)
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
if (body.msg) {
if (UsageError.isUsageErrorMessage(body.msg)) {
throw new UsageError()
}
errorMessage = `${errorMessage}: ${body.msg}`
}
} catch (error) {
if (error instanceof SyntaxError) {
debug(`Raw Body: ${rawBody}`)
}
if (error instanceof UsageError) {
throw error
}
if (NetworkError.isNetworkErrorCode(error?.code)) {
throw new NetworkError(error?.code)
}
isRetryable = true
errorMessage = error.message
}
if (!isRetryable) {
throw new Error(`Received non-retryable error: ${errorMessage}`)
}
if (attempt + 1 === this.maxAttempts) {
throw new Error(
`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
)
}
const retryTimeMilliseconds =
this.getExponentialRetryTimeMilliseconds(attempt)
info(
`Attempt ${attempt + 1} of ${
this.maxAttempts
} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
)
await this.sleep(retryTimeMilliseconds)
attempt++
}
throw new Error(`Request failed`)
}
isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) return false
return statusCode >= 200 && statusCode < 300
}
isRetryableHttpStatusCode(statusCode?: number): boolean {
if (!statusCode) return false
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.GatewayTimeout,
HttpCodes.InternalServerError,
HttpCodes.ServiceUnavailable,
HttpCodes.TooManyRequests
]
return retryableStatusCodes.includes(statusCode)
}
async sleep(milliseconds: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, milliseconds))
}
getExponentialRetryTimeMilliseconds(attempt: number): number {
if (attempt < 0) {
throw new Error('attempt should be a non-negative integer')
}
if (attempt === 0) {
return this.baseRetryIntervalMilliseconds
}
const minTime =
this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
const maxTime = minTime * this.retryMultiplier
// returns a random number between minTime and maxTime (exclusive)
return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}
}
export function internalCacheTwirpClient(options?: {
maxAttempts?: number
retryIntervalMs?: number
retryMultiplier?: number
}): CacheServiceClientJSON {
const client = new CacheServiceClient(
getUserAgentString(),
options?.maxAttempts,
options?.retryIntervalMs,
options?.retryMultiplier
)
return new CacheServiceClientJSON(client)
}
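
A hedged usage sketch for the factory above: the key and version are placeholders, and `ACTIONS_RESULTS_URL`/`ACTIONS_RUNTIME_TOKEN` must already be set (the runner provides both). Failed requests are retried with the randomized exponential backoff from `getExponentialRetryTimeMilliseconds`, roughly `base * multiplier^attempt`.

```typescript
import {internalCacheTwirpClient} from './shared/cacheTwirpClient'

async function findCacheEntry(): Promise<void> {
  const client = internalCacheTwirpClient({maxAttempts: 3})
  const response = await client.GetCacheEntryDownloadURL({
    key: 'node-deps-linux-x64', // placeholder key
    restoreKeys: ['node-deps-linux-'],
    version: '0123abcd' // placeholder version hash
  })
  if (response.ok) {
    console.log(`Cache hit on ${response.matchedKey}`)
  }
}
```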

View File

@ -0,0 +1,72 @@
export class FilesNotFoundError extends Error {
files: string[]
constructor(files: string[] = []) {
let message = 'No files were found to upload'
if (files.length > 0) {
message += `: ${files.join(', ')}`
}
super(message)
this.files = files
this.name = 'FilesNotFoundError'
}
}
export class InvalidResponseError extends Error {
constructor(message: string) {
super(message)
this.name = 'InvalidResponseError'
}
}
export class CacheNotFoundError extends Error {
constructor(message = 'Cache not found') {
super(message)
this.name = 'CacheNotFoundError'
}
}
export class GHESNotSupportedError extends Error {
constructor(
message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.'
) {
super(message)
this.name = 'GHESNotSupportedError'
}
}
export class NetworkError extends Error {
code: string
constructor(code: string) {
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
super(message)
this.code = code
this.name = 'NetworkError'
}
static isNetworkErrorCode = (code?: string): boolean => {
if (!code) return false
return [
'ECONNRESET',
'ENOTFOUND',
'ETIMEDOUT',
'ECONNREFUSED',
'EHOSTUNREACH'
].includes(code)
}
}
export class UsageError extends Error {
constructor() {
const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
super(message)
this.name = 'UsageError'
}
static isUsageErrorMessage = (msg?: string): boolean => {
if (!msg) return false
return msg.includes('insufficient usage')
}
}
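
A sketch of how a caller might branch on these error types; `restore` stands in for any cache-service operation:

```typescript
import {CacheNotFoundError, NetworkError, UsageError} from './shared/errors'

async function tryRestore(
  restore: () => Promise<string>
): Promise<string | undefined> {
  try {
    return await restore()
  } catch (error) {
    if (error instanceof CacheNotFoundError) {
      return undefined // plain cache miss: continue without a cache
    }
    if (error instanceof UsageError || error instanceof NetworkError) {
      console.log(error.message) // quota or connectivity: warn, don't fail
      return undefined
    }
    throw error // anything else is a real failure
  }
}
```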

View File

@ -0,0 +1,9 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')
/**
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
*/
export function getUserAgentString(): string {
return `@actions/cache-${packageJson.version}`
}

View File

@ -0,0 +1,177 @@
import * as core from '@actions/core'
import {
BlobClient,
BlobUploadCommonResponse,
BlockBlobClient,
BlockBlobParallelUploadOptions
} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/ms-rest-js'
import {InvalidResponseError} from './shared/errors'
import {UploadOptions} from '../options'
/**
* Class for tracking the upload state and displaying stats.
*/
export class UploadProgress {
contentLength: number
sentBytes: number
startTime: number
displayedComplete: boolean
timeoutHandle?: ReturnType<typeof setTimeout>
constructor(contentLength: number) {
this.contentLength = contentLength
this.sentBytes = 0
this.displayedComplete = false
this.startTime = Date.now()
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes: number): void {
this.sentBytes = sentBytes
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes(): number {
return this.sentBytes
}
/**
* Returns true if the upload is complete.
*/
isDone(): boolean {
return this.getTransferredBytes() === this.contentLength
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display(): void {
if (this.displayedComplete) {
return
}
const transferredBytes = this.sentBytes
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(
1
)
const elapsedTime = Date.now() - this.startTime
const uploadSpeed = (
transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)
).toFixed(1)
core.info(
`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MB/s`
)
if (this.isDone()) {
this.displayedComplete = true
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress(): (progress: TransferProgressEvent) => void {
return (progress: TransferProgressEvent) => {
this.setSentBytes(progress.loadedBytes)
}
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000): void {
const displayCallback = (): void => {
this.display()
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs)
}
}
this.timeoutHandle = setTimeout(displayCallback, delayInMs)
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer(): void {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle)
this.timeoutHandle = undefined
}
this.display()
}
}
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
 * @param signedUploadURL the signed (SAS) URL for the destination blob
 * @param archivePath path to the cache archive on disk
 * @param options upload options controlling chunk size and concurrency
 * @returns the Azure SDK response for the block blob upload
*/
export async function uploadCacheArchiveSDK(
signedUploadURL: string,
archivePath: string,
options?: UploadOptions
): Promise<BlobUploadCommonResponse> {
const blobClient: BlobClient = new BlobClient(signedUploadURL)
const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
const uploadProgress = new UploadProgress(options?.archiveSizeBytes ?? 0)
// Specify data transfer options
const uploadOptions: BlockBlobParallelUploadOptions = {
blockSize: options?.uploadChunkSize,
concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers
maxSingleShotSize: 128 * 1024 * 1024, // 128 MiB initial transfer size
onProgress: uploadProgress.onProgress()
}
try {
uploadProgress.startDisplayTimer()
core.debug(
`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
)
const response = await blockBlobClient.uploadFile(
archivePath,
uploadOptions
)
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new InvalidResponseError(
`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`
)
}
return response
} catch (error) {
core.warning(
`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`
)
throw error
} finally {
uploadProgress.stopDisplayTimer()
}
}
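
`UploadProgress` can also be exercised on its own; `uploadCacheArchiveSDK` drives it through the Azure SDK's `onProgress` callback exactly the way this sketch does by hand (sizes invented):

```typescript
import {UploadProgress} from './uploadUtils'

const progress = new UploadProgress(4 * 1024 * 1024) // total bytes
progress.startDisplayTimer(500) // print stats every 500 ms

const report = progress.onProgress()
report({loadedBytes: 2 * 1024 * 1024}) // 50.0%
report({loadedBytes: 4 * 1024 * 1024}) // 100%

progress.stopDisplayTimer() // prints the final line exactly once
```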

View File

@ -4,6 +4,14 @@ import * as core from '@actions/core'
* Options to control cache upload
*/
export interface UploadOptions {
/**
   * Indicates whether to use the Azure Blob SDK to upload caches
   * directly to Azure Blob Storage to improve reliability and
   * performance
*
* @default false
*/
useAzureSdk?: boolean
/**
* Number of parallel cache upload
*
@ -16,6 +24,10 @@ export interface UploadOptions {
* @default 32MB
*/
uploadChunkSize?: number
/**
* Archive size in bytes
*/
archiveSizeBytes?: number
}
/**
@ -82,12 +94,18 @@ export type ExtraTarOptions = string[]
* @param copy the original upload options
*/
export function getUploadOptions(copy?: UploadOptions): UploadOptions {
// Defaults if not overridden
const result: UploadOptions = {
useAzureSdk: false,
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024
}
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk
}
if (typeof copy.uploadConcurrency === 'number') {
result.uploadConcurrency = copy.uploadConcurrency
}
@ -97,6 +115,26 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
}
}
/**
* Add env var overrides
*/
// Cap the uploadConcurrency at 32
result.uploadConcurrency = !isNaN(
Number(process.env['CACHE_UPLOAD_CONCURRENCY'])
)
? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
: result.uploadConcurrency
// Cap the uploadChunkSize at 128MiB
result.uploadChunkSize = !isNaN(
Number(process.env['CACHE_UPLOAD_CHUNK_SIZE'])
)
? Math.min(
128 * 1024 * 1024,
Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024
)
: result.uploadChunkSize
core.debug(`Use Azure SDK: ${result.useAzureSdk}`)
core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
core.debug(`Upload chunk size: ${result.uploadChunkSize}`)
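
The net effect of the defaults plus env overrides, with invented env values (per the caps above: 32 workers, 128 MiB chunks):

```typescript
import {getUploadOptions} from './options'

process.env['CACHE_UPLOAD_CONCURRENCY'] = '64' // request 64 workers
process.env['CACHE_UPLOAD_CHUNK_SIZE'] = '256' // request 256 MiB chunks

const opts = getUploadOptions({useAzureSdk: true})
// opts.uploadConcurrency === 32 (capped)
// opts.uploadChunkSize === 128 * 1024 * 1024 (capped)
```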

View File

@ -1,5 +1,12 @@
# @actions/core Releases
### 1.11.1
- Fix uses of `crypto.randomUUID` on Node 18 and earlier [#1842](https://github.com/actions/toolkit/pull/1842)
### 1.11.0
- Add platform info utilities [#1551](https://github.com/actions/toolkit/pull/1551)
- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824)
### 1.10.1
- Fix error message reference in oidc utils [#1511](https://github.com/actions/toolkit/pull/1511)

View File

@ -4,9 +4,6 @@ import * as path from 'path'
import * as core from '../src/core'
import {HttpClient} from '@actions/http-client'
import {toCommandProperties} from '../src/utils'
import * as uuid from 'uuid'
jest.mock('uuid')
/* eslint-disable @typescript-eslint/unbound-method */
@ -49,11 +46,23 @@ const testEnvVars = {
const UUID = '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
const DELIMITER = `ghadelimiter_${UUID}`
jest.mock('crypto', () => ({
...jest.requireActual('crypto'),
randomUUID: jest.fn(() => UUID)
}))
const TEMP_DIR = path.join(__dirname, '_temp')
describe('@actions/core', () => {
beforeAll(() => {
const filePath = path.join(__dirname, `test`)
const filePath = TEMP_DIR
if (!fs.existsSync(filePath)) {
fs.mkdirSync(filePath)
} else {
// Clear out the temp directory
for (const file of fs.readdirSync(filePath)) {
fs.unlinkSync(path.join(filePath, file))
}
}
})
@ -62,10 +71,6 @@ describe('@actions/core', () => {
process.env[key] = testEnvVars[key as keyof typeof testEnvVars]
}
process.stdout.write = jest.fn()
jest.spyOn(uuid, 'v4').mockImplementation(() => {
return UUID
})
})
afterEach(() => {
@ -141,7 +146,7 @@ describe('@actions/core', () => {
`Unexpected input: value should not contain the delimiter "${DELIMITER}"`
)
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(TEMP_DIR, command)
fs.unlinkSync(filePath)
})
@ -155,7 +160,7 @@ describe('@actions/core', () => {
`Unexpected input: name should not contain the delimiter "${DELIMITER}"`
)
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(TEMP_DIR, command)
fs.unlinkSync(filePath)
})
@ -347,7 +352,7 @@ describe('@actions/core', () => {
`Unexpected input: value should not contain the delimiter "${DELIMITER}"`
)
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(TEMP_DIR, command)
fs.unlinkSync(filePath)
})
@ -361,7 +366,7 @@ describe('@actions/core', () => {
`Unexpected input: name should not contain the delimiter "${DELIMITER}"`
)
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(TEMP_DIR, command)
fs.unlinkSync(filePath)
})
@ -585,7 +590,7 @@ describe('@actions/core', () => {
`Unexpected input: value should not contain the delimiter "${DELIMITER}"`
)
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(TEMP_DIR, command)
fs.unlinkSync(filePath)
})
@ -599,7 +604,7 @@ describe('@actions/core', () => {
`Unexpected input: name should not contain the delimiter "${DELIMITER}"`
)
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(TEMP_DIR, command)
fs.unlinkSync(filePath)
})
@ -641,7 +646,7 @@ function assertWriteCalls(calls: string[]): void {
}
function createFileCommandFile(command: string): void {
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(__dirname, `_temp/${command}`)
process.env[`GITHUB_${command}`] = filePath
fs.appendFileSync(filePath, '', {
encoding: 'utf8'
@ -649,7 +654,7 @@ function createFileCommandFile(command: string): void {
}
function verifyFileCommand(command: string, expectedContents: string): void {
const filePath = path.join(__dirname, `test/${command}`)
const filePath = path.join(__dirname, `_temp/${command}`)
const contents = fs.readFileSync(filePath, 'utf8')
try {
expect(contents).toEqual(expectedContents)

View File

@ -1,21 +1,19 @@
{
"name": "@actions/core",
"version": "1.10.1",
"version": "1.11.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@actions/core",
"version": "1.10.1",
"version": "1.11.1",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.1",
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
"@actions/http-client": "^2.0.1"
},
"devDependencies": {
"@types/node": "^12.0.2",
"@types/uuid": "^8.3.4"
"@types/node": "^16.18.112"
}
},
"node_modules/@actions/exec": {
@ -40,15 +38,9 @@
"integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
},
"node_modules/@types/node": {
"version": "12.0.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.2.tgz",
"integrity": "sha512-5tabW/i+9mhrfEOUcLDu2xBPsHJ+X5Orqy9FKpale3SjDA17j5AEpYq5vfy3oAeAHGcvANRCO3NV3d2D6q3NiA==",
"dev": true
},
"node_modules/@types/uuid": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.4.tgz",
"integrity": "sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==",
"version": "16.18.112",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.112.tgz",
"integrity": "sha512-EKrbKUGJROm17+dY/gMi31aJlGLJ75e1IkTojt9n6u+hnaTBDs+M1bIdOawpk2m6YUAXq/R2W0SxCng1tndHCg==",
"dev": true
},
"node_modules/tunnel": {
@ -58,14 +50,6 @@
"engines": {
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
}
},
"node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
}
}
},
"dependencies": {
@ -91,26 +75,15 @@
"integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
},
"@types/node": {
"version": "12.0.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.0.2.tgz",
"integrity": "sha512-5tabW/i+9mhrfEOUcLDu2xBPsHJ+X5Orqy9FKpale3SjDA17j5AEpYq5vfy3oAeAHGcvANRCO3NV3d2D6q3NiA==",
"dev": true
},
"@types/uuid": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.4.tgz",
"integrity": "sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==",
"version": "16.18.112",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.112.tgz",
"integrity": "sha512-EKrbKUGJROm17+dY/gMi31aJlGLJ75e1IkTojt9n6u+hnaTBDs+M1bIdOawpk2m6YUAXq/R2W0SxCng1tndHCg==",
"dev": true
},
"tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
}
}
}

View File

@ -1,6 +1,6 @@
{
"name": "@actions/core",
"version": "1.10.1",
"version": "1.11.1",
"description": "Actions core lib",
"keywords": [
"github",
@ -37,11 +37,9 @@
},
"dependencies": {
"@actions/exec": "^1.1.1",
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
"@actions/http-client": "^2.0.1"
},
"devDependencies": {
"@types/node": "^12.0.2",
"@types/uuid": "^8.3.4"
"@types/node": "^16.18.112"
}
}

View File

@ -3,9 +3,9 @@
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as os from 'os'
import {v4 as uuidv4} from 'uuid'
import {toCommandValue} from './utils'
export function issueFileCommand(command: string, message: any): void {
@ -25,7 +25,7 @@ export function issueFileCommand(command: string, message: any): void {
}
export function prepareKeyValueMessage(key: string, value: any): string {
const delimiter = `ghadelimiter_${uuidv4()}`
const delimiter = `ghadelimiter_${crypto.randomUUID()}`
const convertedValue = toCommandValue(value)
// These should realistically never happen, but just in case someone finds a
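
The hunk above cuts off mid-function; for context, the delimiter frames multi-line values in the `GITHUB_OUTPUT`/`GITHUB_ENV` file command format. A sketch of the complete message shape (the framing shown is an assumption based on the documented file command format, not a quote of the truncated function body):

```typescript
import * as crypto from 'crypto'
import * as os from 'os'

// Assumed completion of prepareKeyValueMessage: key and value framed
// by the random delimiter, one element per line.
function sketchKeyValueMessage(key: string, value: string): string {
  const delimiter = `ghadelimiter_${crypto.randomUUID()}`
  return `${key}<<${delimiter}${os.EOL}${value}${os.EOL}${delimiter}`
}

// Appended to the file named by $GITHUB_OUTPUT this renders as:
// result<<ghadelimiter_9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d
// line one
// line two
// ghadelimiter_9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d
```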

View File

@ -1,5 +1,8 @@
# @actions/glob Releases
### 0.5.0
- Added `excludeHiddenFiles` option, which is disabled by default to preserve existing behavior [#1791: Add glob option to ignore hidden files](https://github.com/actions/toolkit/pull/1791)
### 0.4.0
- Pass in the current workspace as a parameter to HashFiles [#1318](https://github.com/actions/toolkit/pull/1318)

View File

@ -708,7 +708,7 @@ describe('globber', () => {
expect(itemPaths).toEqual([])
})
it('returns hidden files', async () => {
it('returns hidden files by default', async () => {
// Create the following layout:
// <root>
// <root>/.emptyFolder
@ -734,6 +734,26 @@ describe('globber', () => {
])
})
it('ignores hidden files when excludeHiddenFiles is set', async () => {
// Create the following layout:
// <root>
// <root>/.emptyFolder
// <root>/.file
// <root>/.folder
// <root>/.folder/file
const root = path.join(getTestTemp(), 'ignores-hidden-files')
await createHiddenDirectory(path.join(root, '.emptyFolder'))
await createHiddenDirectory(path.join(root, '.folder'))
await createHiddenFile(path.join(root, '.file'), 'test .file content')
await fs.writeFile(
path.join(root, '.folder', 'file'),
'test .folder/file content'
)
const itemPaths = await glob(root, {excludeHiddenFiles: true})
expect(itemPaths).toEqual([root])
})
it('returns normalized paths', async () => {
// Create the following layout:
// <root>/hello/world.txt
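
The test above exercises the option through an internal helper; through the public API the new option looks like this (pattern invented; `glob.create` and `Globber.glob` are the package's entry points):

```typescript
import * as glob from '@actions/glob'

async function listVisibleTsFiles(): Promise<string[]> {
  const globber = await glob.create('**/*.ts', {excludeHiddenFiles: true})
  return await globber.glob() // hidden files and folders are skipped
}
```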

View File

@ -1,6 +1,6 @@
{
"name": "@actions/glob",
"version": "0.4.0",
"version": "0.5.0",
"lockfileVersion": 3,
"requires": true,
"description": "Actions glob lib",
@ -21,7 +21,7 @@
"packages": {
"": {
"name": "@actions/glob",
"version": "0.4.0",
"version": "0.5.0",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.9.1",

View File

@ -1,6 +1,6 @@
{
"name": "@actions/glob",
"version": "0.4.0",
"version": "0.5.0",
"preview": true,
"description": "Actions glob lib",
"keywords": [
