
Merge remote-tracking branch 'upstream/main' into sync-upstream

pull/1935/head
Surya Oruganti 2025-01-19 16:53:22 -08:00
commit bab449df7d
72 changed files with 5112 additions and 279 deletions

View File

@@ -11,7 +11,7 @@ on:
 jobs:
   test:
-    runs-on: macos-latest
+    runs-on: macos-latest-large
     steps:
       - name: setup repo
@@ -48,7 +48,7 @@ jobs:
           path: packages/${{ github.event.inputs.package }}/*.tgz
   publish:
-    runs-on: macos-latest
+    runs-on: macos-latest-large
     needs: test
     environment: npm-publish
     permissions:

View File

@@ -16,7 +16,11 @@ jobs:
     strategy:
       matrix:
-        runs-on: [ubuntu-latest, macos-latest, windows-latest]
+        runs-on: [ubuntu-latest, macos-latest-large, windows-latest]
+        # Node 18 is the current default Node version in hosted runners, so users may still use the toolkit with it when running tests (see https://github.com/actions/toolkit/issues/1841)
+        # Node 20 is the currently support Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions
+        node-version: [18.x, 20.x]
       fail-fast: false
     runs-on: ${{ matrix.runs-on }}
@@ -25,10 +29,10 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4
-      - name: Set Node.js 20.x
+      - name: Set up Node ${{ matrix.node-version }}
         uses: actions/setup-node@v4
         with:
-          node-version: 20.x
+          node-version: ${{ matrix.node-version }}
       - name: npm install
         run: npm install

package-lock.json (generated, 37 lines changed)
View File

@@ -4936,20 +4936,6 @@
       "proxy-from-env": "^1.1.0"
       }
     },
-    "node_modules/axios/node_modules/form-data": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
-      "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
-      "dev": true,
-      "dependencies": {
-        "asynckit": "^0.4.0",
-        "combined-stream": "^1.0.8",
-        "mime-types": "^2.1.12"
-      },
-      "engines": {
-        "node": ">= 6"
-      }
-    },
     "node_modules/axobject-query": {
       "version": "3.2.1",
       "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz",
@@ -7554,6 +7540,20 @@
       "is-callable": "^1.1.3"
       }
     },
+    "node_modules/form-data": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
+      "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+      "dev": true,
+      "dependencies": {
+        "asynckit": "^0.4.0",
+        "combined-stream": "^1.0.8",
+        "mime-types": "^2.1.12"
+      },
+      "engines": {
+        "node": ">= 6"
+      }
+    },
     "node_modules/fs-constants": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
@@ -10647,12 +10647,13 @@
       }
     },
     "node_modules/micromatch": {
-      "version": "4.0.5",
-      "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
-      "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+      "version": "4.0.8",
+      "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
+      "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
-        "braces": "^3.0.2",
+        "braces": "^3.0.3",
         "picomatch": "^2.3.1"
       },
       "engines": {

View File

@@ -1,5 +1,13 @@
 # @actions/artifact Releases
+### 2.2.1
+- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)
+### 2.2.0
+- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)
 ### 2.1.11
 - Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)

View File

@@ -1,4 +1,10 @@
 import * as config from '../src/internal/shared/config'
+import os from 'os'
+// Mock the 'os' module
+jest.mock('os', () => ({
+  cpus: jest.fn()
+}))
 
 beforeEach(() => {
   jest.resetModules()
@@ -30,3 +36,66 @@ describe('isGhes', () => {
     expect(config.isGhes()).toBe(true)
   })
 })
describe('uploadChunkTimeoutEnv', () => {
it('should return default 300000 when no env set', () => {
expect(config.getUploadChunkTimeout()).toBe(300000)
})
it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
expect(config.getUploadChunkTimeout()).toBe(150000)
})
it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
expect(() => {
config.getUploadChunkTimeout()
}).toThrow()
})
})
describe('uploadConcurrencyEnv', () => {
it('should return default 32 when cpu num is <= 4', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
expect(config.getConcurrency()).toBe(32)
})
it('should return 16 * num of cpu when cpu num is > 4', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
expect(config.getConcurrency()).toBe(96)
})
it('should return up to 300 max value', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
expect(config.getConcurrency()).toBe(300)
})
it('should return override value when ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is set', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
expect(config.getConcurrency()).toBe(10)
})
it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
expect(() => {
config.getConcurrency()
}).toThrow()
})
it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
expect(() => {
config.getConcurrency()
}).toThrow()
})
it('cannot go over currency cap when override value is greater', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '40'
expect(config.getConcurrency()).toBe(32)
})
})

View File

@@ -281,7 +281,7 @@ describe('upload-artifact', () => {
       }
     )
 
-    const {id, size} = await uploadArtifact(
+    const {id, size, digest} = await uploadArtifact(
       fixtures.inputs.artifactName,
       fixtures.files.map(file =>
         path.join(fixtures.uploadDirectory, file.name)
@@ -291,6 +291,8 @@ describe('upload-artifact', () => {
     expect(id).toBe(1)
     expect(size).toBe(loadedBytes)
+    expect(digest).toBeDefined()
+    expect(digest).toHaveLength(64)
 
     const extractedDirectory = path.join(
       fixtures.uploadDirectory,

View File

@@ -40,4 +40,4 @@
 #### Defined in
-[src/artifact.ts:7](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/artifact.ts#L7)
+[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7)

View File

@@ -48,7 +48,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L24)
+[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24)
 ## Properties

View File

@@ -61,7 +61,7 @@ single DeleteArtifactResponse object
 #### Defined in
-[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L248)
+[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248)
 ___
@@ -92,7 +92,7 @@ single DownloadArtifactResponse object
 #### Defined in
-[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L138)
+[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138)
 ___
@@ -127,7 +127,7 @@ If there are multiple artifacts with the same name in the same workflow run this
 #### Defined in
-[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L212)
+[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212)
 ___
@@ -159,7 +159,7 @@ ListArtifactResponse object
 #### Defined in
-[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L176)
+[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176)
 ___
@@ -190,4 +190,4 @@ single UploadArtifactResponse object
 #### Defined in
-[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L113)
+[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113)

View File

@@ -49,7 +49,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L4)
+[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4)
 ## Properties
@@ -59,7 +59,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L2)
+[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2)
 ___

View File

@@ -48,7 +48,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L31)
+[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31)
 ## Properties

View File

@@ -48,7 +48,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L17)
+[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17)
 ## Properties

View File

@@ -50,7 +50,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L42)
+[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42)
 ## Properties
@@ -60,7 +60,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L40)
+[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40)
 ___
@@ -198,4 +198,4 @@ ___
 #### Defined in
-[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L49)
+[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49)

View File

@@ -43,7 +43,7 @@ Error.constructor
 #### Defined in
-[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L62)
+[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62)
 ## Properties
@@ -181,4 +181,4 @@ ___
 #### Defined in
-[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L68)
+[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68)

View File

@@ -23,7 +23,7 @@ The time when the artifact was created
 #### Defined in
-[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L123)
+[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128)
 ___
@@ -35,7 +35,7 @@ The ID of the artifact
 #### Defined in
-[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L113)
+[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118)
 ___
@@ -47,7 +47,7 @@ The name of the artifact
 #### Defined in
-[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L108)
+[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113)
 ___
@@ -59,4 +59,4 @@ The size of the artifact in bytes
 #### Defined in
-[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L118)
+[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123)

View File

@@ -43,7 +43,7 @@ single DeleteArtifactResponse object
 #### Defined in
-[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L103)
+[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103)
 ___
@@ -70,7 +70,7 @@ single DownloadArtifactResponse object
 #### Defined in
-[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L89)
+[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89)
 ___
@@ -101,7 +101,7 @@ If there are multiple artifacts with the same name in the same workflow run this
 #### Defined in
-[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L75)
+[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75)
 ___
@@ -129,7 +129,7 @@ ListArtifactResponse object
 #### Defined in
-[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L57)
+[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57)
 ___
@@ -156,4 +156,4 @@ single UploadArtifactResponse object
 #### Defined in
-[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L40)
+[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40)

View File

@@ -20,4 +20,4 @@ The id of the artifact that was deleted
 #### Defined in
-[src/internal/shared/interfaces.ts:158](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L158)
+[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163)

View File

@@ -20,4 +20,4 @@ Denotes where the artifact will be downloaded to. If not specified then the arti
 #### Defined in
-[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L98)
+[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103)

View File

@@ -20,4 +20,4 @@ The path where the artifact was downloaded to
 #### Defined in
-[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L88)
+[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93)

View File

@@ -27,4 +27,4 @@ The criteria for finding Artifact(s) out of the scope of the current run.
 #### Defined in
-[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L131)
+[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136)

View File

@@ -20,4 +20,4 @@ Metadata about the artifact that was found
 #### Defined in
-[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L57)
+[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62)

View File

@@ -21,4 +21,4 @@ In the case of reruns, this can be useful to avoid duplicates
 #### Defined in
-[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L68)
+[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73)

View File

@@ -20,4 +20,4 @@ A list of artifacts that were found
 #### Defined in
-[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L78)
+[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83)

View File

@@ -28,7 +28,7 @@ For large files that are not easily compressed, a value of 0 is recommended for
 #### Defined in
-[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L47)
+[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52)
 ___
@@ -52,4 +52,4 @@ input of 0 assumes default retention setting.
 #### Defined in
-[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L36)
+[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41)

View File

@@ -8,11 +8,24 @@ Response from the server when an artifact is uploaded
 ### Properties
+- [digest](UploadArtifactResponse.md#digest)
 - [id](UploadArtifactResponse.md#id)
 - [size](UploadArtifactResponse.md#size)
 ## Properties
+### digest
+`Optional` **digest**: `string`
+The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
+#### Defined in
+[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19)
+___
 ### id
 `Optional` **id**: `number`
@@ -22,7 +35,7 @@ This ID can be used as input to other APIs to download, delete or get more infor
 #### Defined in
-[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L14)
+[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14)
 ___
@@ -34,4 +47,4 @@ Total size of the artifact in bytes. Not provided if no artifact was uploaded
 #### Defined in
-[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L8)
+[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8)

View File

@@ -1,12 +1,12 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.11",
+  "version": "2.2.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/artifact",
-      "version": "2.1.11",
+      "version": "2.2.1",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
@@ -839,9 +839,10 @@
       }
     },
     "node_modules/cross-spawn": {
-      "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
-      "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+      "version": "7.0.6",
+      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+      "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
+      "license": "MIT",
       "dependencies": {
         "path-key": "^3.1.0",
         "shebang-command": "^2.0.0",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@actions/artifact",
-  "version": "2.1.11",
+  "version": "2.2.1",
   "preview": true,
   "description": "Actions artifact lib",
   "keywords": [

View File

@@ -1,4 +1,5 @@
 import os from 'os'
+import {info} from '@actions/core'
 
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -47,17 +48,49 @@ export function getGitHubWorkspaceDir(): string {
 // Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
 // If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
 // Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+// This value can be lowered with ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY variable.
 export function getConcurrency(): number {
   const numCPUs = os.cpus().length
+  let concurrencyCap = 32
 
-  if (numCPUs <= 4) {
-    return 32
+  if (numCPUs > 4) {
+    const concurrency = 16 * numCPUs
+    concurrencyCap = concurrency > 300 ? 300 : concurrency
   }
 
-  const concurrency = 16 * numCPUs
-  return concurrency > 300 ? 300 : concurrency
+  const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
+  if (concurrencyOverride) {
+    const concurrency = parseInt(concurrencyOverride)
+    if (isNaN(concurrency) || concurrency < 1) {
+      throw new Error(
+        'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
+      )
+    }
+    if (concurrency < concurrencyCap) {
+      return concurrency
+    }
+    info(
+      `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Lowering it to the cap.`
+    )
+  }
+
+  return concurrencyCap
 }
 
 export function getUploadChunkTimeout(): number {
-  return 300_000 // 5 minutes
+  const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
+  if (!timeoutVar) {
+    return 300000 // 5 minutes
+  }
+  const timeout = parseInt(timeoutVar)
+  if (isNaN(timeout)) {
+    throw new Error(
+      'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
+    )
+  }
+  return timeout
 }
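The two hunks above wire `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` into `getConcurrency()` and `getUploadChunkTimeout()`. A minimal usage sketch, assuming a workflow script that sets the variables before uploading; the artifact name, file list, and numeric values are illustrative, not part of the diff:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function uploadWithTunedLimits(): Promise<void> {
  // Lower upload parallelism below the CPU-derived cap (32, or 16 * CPUs up to 300).
  process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '8'
  // Allow each chunk up to 10 minutes instead of the default 300000 ms.
  process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '600000'

  const client = new DefaultArtifactClient()
  // `digest` is the new field returned alongside `id` and `size` in this release.
  const {id, size, digest} = await client.uploadArtifact(
    'my-artifact', // hypothetical artifact name
    ['dist/output.txt'], // hypothetical file list
    'dist' // hypothetical root directory
  )
  console.log(`artifact ${id}: ${size} bytes, sha256 ${digest}`)
}
```

Non-numeric values, or a concurrency below 1, throw an error, as the new tests in this commit assert.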

View File

@@ -12,6 +12,11 @@ export interface UploadArtifactResponse {
    * This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
    */
   id?: number
+  /**
+   * The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
+   */
+  digest?: string
 }
 
 /**

View File

@@ -110,6 +110,7 @@ export async function uploadArtifact(
   return {
     size: uploadResult.uploadSize,
+    digest: uploadResult.sha256Hash,
     id: Number(artifactId)
   }
 }

View File

@@ -32,8 +32,7 @@ async function run() {
     const ghToken = core.getInput('gh-token');
 
     const attestation = await attest({
-      subjectName: 'my-artifact-name',
-      subjectDigest: { 'sha256': '36ab4667...'},
+      subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}],
       predicateType: 'https://in-toto.io/attestation/release',
       predicate: { . . . },
       token: ghToken
@@ -49,11 +48,12 @@ The `attest` function supports the following options:
 ```typescript
 export type AttestOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
+  // Deprecated. Use 'subjects' instead.
+  subjectName?: string
+  // Deprecated. Use 'subjects' instead.
+  subjectDigest?: Record<string, string>
+  // Collection of subjects to be attested
+  subjects?: Subject[]
   // URI identifying the content type of the predicate being attested.
   predicateType: string
   // Predicate to be attested.
@@ -68,6 +68,13 @@ export type AttestOptions = {
   // Whether to skip writing the attestation to the GH attestations API.
   skipWrite?: boolean
 }
+
+export type Subject = {
+  // Name of the subject.
+  name: string
+  // Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
+  digest: Record<string, string>
+}
 ```
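Because `subjects` is an array, one call can now attest several artifacts in a single statement. A sketch, assuming a release step with two build outputs; the names, the second digest, and the predicate body are invented for illustration:

```typescript
import {attest} from '@actions/attest'
import * as core from '@actions/core'

async function run(): Promise<void> {
  const attestation = await attest({
    subjects: [
      {name: 'my-cli-linux-x64', digest: {sha256: '36ab4667...'}},
      {name: 'my-cli-darwin-arm64', digest: {sha256: '9f2d1c08...'}} // hypothetical digest
    ],
    predicateType: 'https://in-toto.io/attestation/release',
    predicate: {version: '1.2.3'}, // hypothetical predicate body
    token: core.getInput('gh-token')
  })
  core.info(`attestation ID: ${attestation.attestationID}`)
}

run()
```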
### `attestProvenance`
@@ -105,12 +112,13 @@ The `attestProvenance` function supports the following options:
 ```typescript
 export type AttestProvenanceOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
-  // GitHub token for writing attestations.
+  // Deprecated. Use 'subjects' instead.
+  subjectName?: string
+  // Deprecated. Use 'subjects' instead.
+  subjectDigest?: Record<string, string>
+  // Collection of subjects to be attested
+  subjects?: Subject[]
+  // URI identifying the content type of the predicate being attested.
   token: string
   // Sigstore instance to use for signing. Must be one of "public-good" or
   // "github".
View File

@@ -5,6 +5,8 @@
 - Bump @actions/core from 1.10.1 to 1.11.1 [#1847](https://github.com/actions/toolkit/pull/1847)
 - Bump @sigstore/bundle from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
 - Bump @sigstore/sign from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
+- Support for generating multi-subject attestations [#1864](https://github.com/actions/toolkit/pull/1865)
+- Fix bug in `buildSLSAProvenancePredicate` related to `workflow_ref` OIDC token claims containing the "@" symbol in the tag name [#1863](https://github.com/actions/toolkit/pull/1863)
 
 ### 1.4.2

View File

@@ -1,5 +1,47 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`provenance functions buildSLSAProvenancePredicate handle tags including "@" character 1`] = `
{
"params": {
"buildDefinition": {
"buildType": "https://actions.github.io/buildtypes/workflow/v1",
"externalParameters": {
"workflow": {
"path": ".github/workflows/main.yml",
"ref": "foo@1.0.0",
"repository": "https://foo.ghe.com/owner/repo",
},
},
"internalParameters": {
"github": {
"event_name": "push",
"repository_id": "repo-id",
"repository_owner_id": "owner-id",
"runner_environment": "github-hosted",
},
},
"resolvedDependencies": [
{
"digest": {
"gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
},
"uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
},
],
},
"runDetails": {
"builder": {
"id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
},
"metadata": {
"invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
},
},
},
"type": "https://slsa.dev/provenance/v1",
}
`;
exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
{
  "params": {

View File

@@ -0,0 +1,16 @@
import {attest} from '../src/attest'
describe('attest', () => {
describe('when no subject information is provided', () => {
it('throws an error', async () => {
const options = {
predicateType: 'foo',
predicate: {bar: 'baz'},
token: 'token'
}
expect(attest(options)).rejects.toThrowError(
'Must provide either subjectName and subjectDigest or subjects'
)
})
})
})

View File

@@ -17,7 +17,7 @@ describe('buildIntotoStatement', () => {
   }
 
   it('returns an intoto statement', () => {
-    const statement = buildIntotoStatement(subject, predicate)
+    const statement = buildIntotoStatement([subject], predicate)
     expect(statement).toMatchSnapshot()
   })
 })

View File

@@ -33,15 +33,7 @@ describe('provenance functions', () => {
     runner_environment: 'github-hosted'
   }
 
-  beforeEach(async () => {
-    process.env = {
-      ...originalEnv,
-      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
-      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
-      GITHUB_SERVER_URL: 'https://foo.ghe.com',
-      GITHUB_REPOSITORY: claims.repository
-    }
+  const mockIssuer = async (claims: jose.JWTPayload): Promise<void> => {
     // Generate JWT signing key
     const key = await jose.generateKeyPair('PS256')
@@ -60,6 +52,18 @@ describe('provenance functions', () => {
     // Mock OIDC token endpoint for populating the provenance
     nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
+  }
+
+  beforeEach(async () => {
+    process.env = {
+      ...originalEnv,
+      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
+      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
+      GITHUB_SERVER_URL: 'https://foo.ghe.com',
+      GITHUB_REPOSITORY: claims.repository
+    }
+
+    await mockIssuer(claims)
   })
 
   afterEach(() => {
@@ -71,6 +75,16 @@ describe('provenance functions', () => {
       const predicate = await buildSLSAProvenancePredicate()
       expect(predicate).toMatchSnapshot()
     })
+
+    it('handle tags including "@" character', async () => {
+      nock.cleanAll()
+      await mockIssuer({
+        ...claims,
+        workflow_ref: 'owner/repo/.github/workflows/main.yml@foo@1.0.0'
+      })
+      const predicate = await buildSLSAProvenancePredicate()
+      expect(predicate).toMatchSnapshot()
+    })
   })
 
   describe('attestProvenance', () => {
@@ -115,8 +129,7 @@ describe('provenance functions', () => {
     describe('when the sigstore instance is explicitly set', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token',
           sigstore: 'github'
         })
@@ -143,8 +156,7 @@ describe('provenance functions', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token'
         })
@@ -178,8 +190,7 @@ describe('provenance functions', () => {
     describe('when the sigstore instance is explicitly set', () => {
       it('attests provenance', async () => {
         const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
           token: 'token',
           sigstore: 'public-good'
         })
@@ -206,8 +217,7 @@ describe('provenance functions', () => {
       it('attests provenance', async () => {
        const attestation = await attestProvenance({
-          subjectName,
-          subjectDigest,
+          subjects: [{name: subjectName, digest: subjectDigest}],
          token: 'token'
        })

View File

@@ -14,11 +14,16 @@ const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
  * Options for attesting a subject / predicate.
  */
 export type AttestOptions = {
-  // The name of the subject to be attested.
-  subjectName: string
-  // The digest of the subject to be attested. Should be a map of digest
-  // algorithms to their hex-encoded values.
-  subjectDigest: Record<string, string>
+  /**
+   * @deprecated Use `subjects` instead.
+   **/
+  subjectName?: string
+  /**
+   * @deprecated Use `subjects` instead.
+   **/
+  subjectDigest?: Record<string, string>
+  // Subjects to be attested.
+  subjects?: Subject[]
   // Content type of the predicate being attested.
   predicateType: string
   // Predicate to be attested.
@@ -42,15 +47,24 @@ export type AttestOptions = {
  * @returns A promise that resolves to the attestation.
  */
 export async function attest(options: AttestOptions): Promise<Attestation> {
-  const subject: Subject = {
-    name: options.subjectName,
-    digest: options.subjectDigest
+  let subjects: Subject[]
+
+  if (options.subjects) {
+    subjects = options.subjects
+  } else if (options.subjectName && options.subjectDigest) {
+    subjects = [{name: options.subjectName, digest: options.subjectDigest}]
+  } else {
+    throw new Error(
+      'Must provide either subjectName and subjectDigest or subjects'
+    )
   }
 
   const predicate: Predicate = {
     type: options.predicateType,
     params: options.predicate
   }
-  const statement = buildIntotoStatement(subject, predicate)
+
+  const statement = buildIntotoStatement(subjects, predicate)
 
   // Sign the provenance statement
   const payload: Payload = {

View File

@@ -20,12 +20,12 @@ export type InTotoStatement = {
  * @returns The constructed in-toto statement.
  */
 export const buildIntotoStatement = (
-  subject: Subject,
+  subjects: Subject[],
   predicate: Predicate
 ): InTotoStatement => {
   return {
     _type: INTOTO_STATEMENT_V1_TYPE,
-    subject: [subject],
+    subject: subjects,
     predicateType: predicate.type,
     predicate: predicate.params
   }
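For orientation, a conceptual sketch of what the updated helper produces for two subjects. `buildIntotoStatement` lives in an internal module, so this is illustrative rather than public API usage, and the import path, names, and digests are assumptions:

```typescript
import {buildIntotoStatement} from './intoto' // internal module path, assumed

const subjects = [
  {name: 'app-linux-x64', digest: {sha256: 'aaaa...'}}, // hypothetical subject
  {name: 'app-darwin-arm64', digest: {sha256: 'bbbb...'}} // hypothetical subject
]
const predicate = {type: 'https://slsa.dev/provenance/v1', params: {}}

const statement = buildIntotoStatement(subjects, predicate)
// statement.subject now carries both entries instead of wrapping a single one:
// {
//   _type: '<in-toto statement v1 type>',
//   subject: [{name: 'app-linux-x64', ...}, {name: 'app-darwin-arm64', ...}],
//   predicateType: 'https://slsa.dev/provenance/v1',
//   predicate: {}
// }
```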

View File

@@ -30,9 +30,11 @@ export const buildSLSAProvenancePredicate = async (
   // Split just the path and ref from the workflow string.
   // owner/repo/.github/workflows/main.yml@main =>
   //   .github/workflows/main.yml, main
-  const [workflowPath, workflowRef] = claims.workflow_ref
+  const [workflowPath, ...workflowRefChunks] = claims.workflow_ref
     .replace(`${claims.repository}/`, '')
     .split('@')
+  // Handle case where tag contains `@` (e.g: when using changesets in a monorepo context),
+  const workflowRef = workflowRefChunks.join('@')
 
   return {
     type: SLSA_PREDICATE_V1_TYPE,
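To make the fix concrete, a small sketch of what the new destructuring yields for a ref whose tag itself contains `@`; the values mirror the test case added in this commit:

```typescript
// Illustrative values taken from the new test case.
const repository = 'owner/repo'
const workflow_ref = 'owner/repo/.github/workflows/main.yml@foo@1.0.0'

const [workflowPath, ...workflowRefChunks] = workflow_ref
  .replace(`${repository}/`, '')
  .split('@')
const workflowRef = workflowRefChunks.join('@')

console.log(workflowPath) // ".github/workflows/main.yml"
console.log(workflowRef) // "foo@1.0.0" (the previous destructuring yielded just "foo")
```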

View File

@@ -6,6 +6,20 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac
 Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.
 
+## ⚠️ Important changes
+
+The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
+
+The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.
+
+**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.**
+
+If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
+
+Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.
+
+Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
+
 ## Usage
 
 This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).
@@ -47,5 +61,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
 A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail-fast, previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner were used). Sometimes, a segment download gets stuck which causes the workflow job to be stuck forever and fail. Version `v3.0.4` of cache package introduces a segment download timeout. The segment download timeout will allow the segment download to get aborted and hence allow the job to proceed with a cache miss.
 
 Default value of this timeout is 10 minutes (starting `v3.2.1` and higher, previously 60 minutes in versions between `v.3.0.4` and `v3.2.0`, both included) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with timeout value in minutes.
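As a concrete illustration of the segment download timeout described above, a minimal sketch; the paths, keys, and timeout value are placeholders:

```typescript
import * as cache from '@actions/cache'

async function restoreWithShortSegmentTimeout(): Promise<string | undefined> {
  // Abort a stuck segment download after 5 minutes instead of the 10 minute default.
  process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS = '5'

  const paths = ['node_modules'] // placeholder paths to restore
  const key = 'npm-linux-abc123' // placeholder primary key
  const restoreKeys = ['npm-linux-'] // placeholder fallback prefixes

  // Resolves to the matched key on a cache hit, or undefined on a miss.
  return cache.restoreCache(paths, key, restoreKeys)
}
```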

View File

@@ -1,9 +1,36 @@
 # @actions/cache Releases
+### 4.0.0
+#### Important changes
+The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
+The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.
+**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.**
+If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
+Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.
+Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
+#### Minor changes
+- Update `@actions/core` to `1.11.0`
+- Update `semver` `6.3.1`
+- Add `twirp-ts` `2.5.0` to dependencies
+### 3.3.0
+- Update `@actions/core` to `1.11.1`
+- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
 ### 3.2.4
 - Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
 
 ### 3.2.3
 - Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)

View File

@@ -1,4 +1,5 @@
-import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient'
+import {downloadCache} from '../src/internal/cacheHttpClient'
+import {getCacheVersion} from '../src/internal/cacheUtils'
 import {CompressionMethod} from '../src/internal/constants'
 import * as downloadUtils from '../src/internal/downloadUtils'
 import {DownloadOptions, getDownloadOptions} from '../src/options'

View File

@@ -42,23 +42,3 @@ test('resolvePaths works on github workspace directory', async () => {
   const paths = await cacheUtils.resolvePaths([workspace])
   expect(paths.length).toBeGreaterThan(0)
 })
-
-test('isGhes returns false for github.com', async () => {
-  process.env.GITHUB_SERVER_URL = 'https://github.com'
-  expect(cacheUtils.isGhes()).toBe(false)
-})
-
-test('isGhes returns false for ghe.com', async () => {
-  process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
-  expect(cacheUtils.isGhes()).toBe(false)
-})
-
-test('isGhes returns true for enterprise URL', async () => {
-  process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
-  expect(cacheUtils.isGhes()).toBe(true)
-})
-
-test('isGhes returns false for ghe.localhost', () => {
-  process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
-  expect(cacheUtils.isGhes()).toBe(false)
-})

packages/cache/__tests__/config.test.ts (new file, 25 lines)
View File

@@ -0,0 +1,25 @@
import * as config from '../src/internal/config'
beforeEach(() => {
jest.resetModules()
})
test('isGhes returns false for github.com', async () => {
process.env.GITHUB_SERVER_URL = 'https://github.com'
expect(config.isGhes()).toBe(false)
})
test('isGhes returns false for ghe.com', async () => {
process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
expect(config.isGhes()).toBe(false)
})
test('isGhes returns true for enterprise URL', async () => {
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
expect(config.isGhes()).toBe(true)
})
test('isGhes returns false for ghe.localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
expect(config.isGhes()).toBe(false)
})

View File

@@ -11,8 +11,6 @@ const downloadConcurrency = 8
 const timeoutInMs = 30000
 const segmentTimeoutInMs = 600000
 const lookupOnly = false
-const uploadConcurrency = 4
-const uploadChunkSize = 32 * 1024 * 1024
 
 test('getDownloadOptions sets defaults', async () => {
   const actualOptions = getDownloadOptions()
@@ -43,18 +41,21 @@ test('getDownloadOptions overrides all settings', async () => {
 })
 
 test('getUploadOptions sets defaults', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 4,
+    uploadChunkSize: 32 * 1024 * 1024,
+    useAzureSdk: false
+  }
   const actualOptions = getUploadOptions()
-  expect(actualOptions).toEqual({
-    uploadConcurrency,
-    uploadChunkSize
-  })
+  expect(actualOptions).toEqual(expectedOptions)
 })
 
 test('getUploadOptions overrides all settings', async () => {
   const expectedOptions: UploadOptions = {
     uploadConcurrency: 2,
-    uploadChunkSize: 16 * 1024 * 1024
+    uploadChunkSize: 16 * 1024 * 1024,
+    useAzureSdk: true
   }
 
   const actualOptions = getUploadOptions(expectedOptions)
@@ -62,6 +63,34 @@ test('getUploadOptions overrides all settings', async () => {
   expect(actualOptions).toEqual(expectedOptions)
 })
 
+test('env variables override all getUploadOptions settings', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 16,
+    uploadChunkSize: 64 * 1024 * 1024,
+    useAzureSdk: true
+  }
+  process.env.CACHE_UPLOAD_CONCURRENCY = '16'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
+test('env variables override all getUploadOptions settings but do not exceed caps', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 32,
+    uploadChunkSize: 128 * 1024 * 1024,
+    useAzureSdk: true
+  }
+  process.env.CACHE_UPLOAD_CONCURRENCY = '64'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
 test('getDownloadOptions overrides download timeout minutes', async () => {
   const expectedOptions: DownloadOptions = {
     useAzureSdk: false,
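The two new tests above encode the intended behaviour of the upload overrides. A usage sketch, under the assumption that the options module is imported directly (the tests import it from `src/options`, the published package ships it under `lib/options`); the numeric values are placeholders and the 32-connection and 128 MB caps are taken from the test expectations:

```typescript
// Import path assumed from the package layout; the tests use '../src/options'.
import {getUploadOptions} from '@actions/cache/lib/options'

// Ask for more parallel connections and larger chunks; values beyond the caps
// exercised above (32 connections, 128 MB chunks) are clamped back down.
process.env.CACHE_UPLOAD_CONCURRENCY = '16'
process.env.CACHE_UPLOAD_CHUNK_SIZE = '64' // interpreted as MB, per the tests

const options = getUploadOptions({useAzureSdk: true})
// options.uploadConcurrency === 16
// options.uploadChunkSize === 64 * 1024 * 1024
```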

View File

@@ -0,0 +1,415 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as tar from '../src/internal/tar'
import * as config from '../src/internal/config'
import * as cacheUtils from '../src/internal/cacheUtils'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import {restoreCache} from '../src/cache'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
import {DownloadOptions} from '../src/options'
jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/config')
jest.mock('../src/internal/tar')
let logDebugMock: jest.SpyInstance
let logInfoMock: jest.SpyInstance
beforeAll(() => {
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
const actualUtils = jest.requireActual('../src/internal/cacheUtils')
return actualUtils.getCacheFileName(cm)
})
// Ensure that we're using v2 for these tests
jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
logDebugMock = jest.spyOn(core, 'debug')
logInfoMock = jest.spyOn(core, 'info')
})
afterEach(() => {
expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
})
test('restore with no path should fail', async () => {
const paths: string[] = []
const key = 'node-test'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('restore with too many keys should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = [...Array(20).keys()].map(x => x.toString())
await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
})
test('restore with large key should fail', async () => {
const paths = ['node_modules']
const key = 'foo'.repeat(512) // Over the 512 character limit
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
)
})
test('restore with invalid key should fail', async () => {
const paths = ['node_modules']
const key = 'comma,comma'
await expect(restoreCache(paths, key)).rejects.toThrowError(
`Key Validation Error: ${key} cannot contain commas.`
)
})
test('restore with no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
jest
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockReturnValue(
Promise.resolve({
ok: false,
signedDownloadUrl: '',
matchedKey: ''
})
)
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
})
test('restore with server error should fail', async () => {
const paths = ['node_modules']
const key = 'node-test'
const logWarningMock = jest.spyOn(core, 'warning')
jest
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockImplementation(() => {
throw new Error('HTTP Error Occurred')
})
const cacheKey = await restoreCache(paths, key)
expect(cacheKey).toBe(undefined)
expect(logWarningMock).toHaveBeenCalledTimes(1)
expect(logWarningMock).toHaveBeenCalledWith(
'Failed to restore: HTTP Error Occurred'
)
})
test('restore with restore keys and no cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = ['node-']
const logWarningMock = jest.spyOn(core, 'warning')
jest
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
.mockReturnValue(
Promise.resolve({
ok: false,
signedDownloadUrl: '',
matchedKey: ''
})
)
const cacheKey = await restoreCache(paths, key, restoreKeys)
expect(cacheKey).toBe(undefined)
expect(logWarningMock).toHaveBeenCalledWith(
`Cache not found for keys: ${[key, ...restoreKeys].join(', ')}`
)
})
test('restore with gzip compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const compressionMethod = CompressionMethod.Gzip
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
const options = {useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: key
})
)
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Gzip)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const cacheKey = await restoreCache(paths, key, [], options)
expect(cacheKey).toBe(key)
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys: [],
version: cacheVersion
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
signedDownloadUrl,
archivePath,
options
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})
test('restore with zstd compressed cache found', async () => {
const paths = ['node_modules']
const key = 'node-test'
const compressionMethod = CompressionMethod.Zstd
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
const options = {useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: key
})
)
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Zstd)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 62915000
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const cacheKey = await restoreCache(paths, key, [], options)
expect(cacheKey).toBe(key)
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys: [],
version: cacheVersion
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
signedDownloadUrl,
archivePath,
options
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})
test('restore with cache found for restore key', async () => {
const paths = ['node_modules']
const key = 'node-test'
const restoreKeys = ['node-']
const compressionMethod = CompressionMethod.Gzip
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
const options = {useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: restoreKeys[0]
})
)
const tempPath = '/foo/bar'
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
createTempDirectoryMock.mockImplementation(async () => {
return Promise.resolve(tempPath)
})
const archivePath = path.join(tempPath, CacheFilename.Gzip)
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const fileSize = 142
const getArchiveFileSizeInBytesMock = jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValue(fileSize)
const extractTarMock = jest.spyOn(tar, 'extractTar')
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
const cacheKey = await restoreCache(paths, key, restoreKeys, options)
expect(cacheKey).toBe(restoreKeys[0])
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys,
version: cacheVersion
})
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
expect(downloadCacheMock).toHaveBeenCalledWith(
signedDownloadUrl,
archivePath,
options
)
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
expect(extractTarMock).toHaveBeenCalledTimes(1)
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
})
test('restore with lookup only enabled', async () => {
const paths = ['node_modules']
const key = 'node-test'
const compressionMethod = CompressionMethod.Gzip
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
const cacheVersion =
'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
const options = {lookupOnly: true, useAzureSdk: true} as DownloadOptions
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
getCacheVersionMock.mockReturnValue(cacheVersion)
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
const getCacheDownloadURLMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'GetCacheEntryDownloadURL'
)
getCacheDownloadURLMock.mockReturnValue(
Promise.resolve({
ok: true,
signedDownloadUrl,
matchedKey: key
})
)
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
const cacheKey = await restoreCache(paths, key, undefined, options)
expect(cacheKey).toBe(key)
expect(getCacheVersionMock).toHaveBeenCalledWith(
paths,
compressionMethod,
false
)
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
key,
restoreKeys: [],
version: cacheVersion
})
expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')
// creating a tempDir and downloading the cache are skipped
expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
expect(downloadCacheMock).toHaveBeenCalledTimes(0)
})
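Taken together, these tests pin down the v2 restore flow: a lookup via GetCacheEntryDownloadURL, an optional download, extraction, and cleanup. As a rough usage sketch (not part of this diff; it only assumes the public restoreCache API and the DownloadOptions fields exercised above), a caller could probe with lookupOnly before doing the real restore:

import {restoreCache} from '@actions/cache'

async function restoreNodeModules(): Promise<void> {
  const paths = ['node_modules']
  const key = 'node-test'
  const restoreKeys = ['node-']

  // Probe the cache service without creating a temp dir or downloading.
  const hit = await restoreCache(paths, key, restoreKeys, {lookupOnly: true})
  if (!hit) {
    console.log(`Cache not found for keys: ${[key, ...restoreKeys].join(', ')}`)
    return
  }

  // Download and extract the matched entry (primary key or restore key).
  const restoredKey = await restoreCache(paths, key, restoreKeys)
  console.log(`Restored cache with key: ${restoredKey}`)
}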


@@ -3,6 +3,7 @@ import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import * as cacheUtils from '../src/internal/cacheUtils'
+import * as config from '../src/internal/config'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as tar from '../src/internal/tar'
import {TypedResponse} from '@actions/http-client/lib/interfaces'
@@ -14,6 +15,7 @@ import {HttpClientError} from '@actions/http-client'
jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
+jest.mock('../src/internal/config')
jest.mock('../src/internal/tar')
beforeAll(() => {
@@ -94,7 +96,7 @@ test('save with large cache outputs should fail in GHES with error message', asy
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
-jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true)
+jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
@@ -146,7 +148,7 @@ test('save with large cache outputs should fail in GHES without error message',
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
-jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true)
+jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
const reserveCacheMock = jest
.spyOn(cacheHttpClient, 'reserveCache')
@@ -268,7 +270,12 @@ test('save with server error should fail', async () => {
compression
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
-expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+expect(saveCacheMock).toHaveBeenCalledWith(
+cacheId,
+archiveFile,
+'',
+undefined
+)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
@@ -313,7 +320,12 @@ test('save with valid inputs uploads a cache', async () => {
compression
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
-expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
+expect(saveCacheMock).toHaveBeenCalledWith(
+cacheId,
+archiveFile,
+'',
+undefined
+)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})


@@ -0,0 +1,339 @@
import * as core from '@actions/core'
import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheUtils from '../src/internal/cacheUtils'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as config from '../src/internal/config'
import * as tar from '../src/internal/tar'
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import {UploadOptions} from '../src/options'
let logDebugMock: jest.SpyInstance
jest.mock('../src/internal/tar')
const uploadFileMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
uploadFile: uploadFileMock
}))
jest.mock('@azure/storage-blob', () => ({
BlobClient: jest.fn().mockImplementation(() => {
return {
getBlockBlobClient: blockBlobClientMock
}
})
}))
beforeAll(() => {
process.env['ACTIONS_RUNTIME_TOKEN'] = 'token'
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => {
return filePaths.map(x => path.resolve(x))
})
jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => {
return Promise.resolve('/foo/bar')
})
// Ensure that we're using v2 for these tests
jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
logDebugMock = jest.spyOn(core, 'debug')
})
afterEach(() => {
expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
jest.clearAllMocks()
})
test('save with missing input should fail', async () => {
const paths: string[] = []
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
await expect(saveCache(paths, key)).rejects.toThrowError(
`Path Validation Error: At least one directory or file path is required`
)
})
test('save with large cache outputs should fail using', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const createTarMock = jest.spyOn(tar, 'createTar')
const logWarningMock = jest.spyOn(core, 'warning')
const cacheSize = 11 * 1024 * 1024 * 1024 //~11GB, over the 10GB limit
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(cacheSize)
const compression = CompressionMethod.Gzip
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const cacheId = await saveCache([paths], key)
expect(cacheId).toBe(-1)
expect(logWarningMock).toHaveBeenCalledWith(
'Failed to save: Cache size of ~11264 MB (11811160064 B) is over the 10GB limit, not saving cache.'
)
const archiveFolder = '/foo/bar'
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('create cache entry failure', async () => {
const paths = ['node_modules']
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const infoLogMock = jest.spyOn(core, 'info')
const createCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
const createTarMock = jest.spyOn(tar, 'createTar')
const finalizeCacheEntryMock = jest.spyOn(
CacheServiceClientJSON.prototype,
'FinalizeCacheEntryUpload'
)
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const archiveFileSize = 1024
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const cacheId = await saveCache(paths, key)
expect(cacheId).toBe(-1)
expect(infoLogMock).toHaveBeenCalledWith(
`Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.`
)
expect(createCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion
})
expect(createTarMock).toHaveBeenCalledTimes(1)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0)
expect(saveCacheMock).toHaveBeenCalledTimes(0)
})
test('save cache fails if a signedUploadURL was not passed', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const signedUploadURL = ''
const archiveFileSize = 1024
const options: UploadOptions = {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
}
const createCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
)
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const cacheId = await saveCache([paths], key, options)
expect(cacheId).toBe(-1)
expect(createCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(saveCacheMock).toHaveBeenCalledWith(
-1,
archiveFile,
signedUploadURL,
options
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
test('finalize save cache failure', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const logWarningMock = jest.spyOn(core, 'warning')
const signedUploadURL = 'https://blob-storage.local?signed=true'
const archiveFileSize = 1024
const options: UploadOptions = {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
}
const createCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
)
const createTarMock = jest.spyOn(tar, 'createTar')
const saveCacheMock = jest
.spyOn(cacheHttpClient, 'saveCache')
.mockResolvedValue(Promise.resolve())
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValueOnce(Promise.resolve(compression))
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const finalizeCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
.mockReturnValue(Promise.resolve({ok: false, entryId: ''}))
const cacheId = await saveCache([paths], key, options)
expect(createCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion
})
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(saveCacheMock).toHaveBeenCalledWith(
-1,
archiveFile,
signedUploadURL,
options
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion,
sizeBytes: archiveFileSize.toString()
})
expect(cacheId).toBe(-1)
expect(logWarningMock).toHaveBeenCalledWith(
`Failed to save: Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
)
})
test('save with valid inputs uploads a cache', async () => {
const paths = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
const cachePaths = [path.resolve(paths)]
const signedUploadURL = 'https://blob-storage.local?signed=true'
const createTarMock = jest.spyOn(tar, 'createTar')
const archiveFileSize = 1024
const options: UploadOptions = {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
}
jest
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
.mockReturnValueOnce(archiveFileSize)
const cacheId = 4
jest
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
.mockReturnValue(
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL})
)
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
const compression = CompressionMethod.Zstd
const getCompressionMock = jest
.spyOn(cacheUtils, 'getCompressionMethod')
.mockReturnValue(Promise.resolve(compression))
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
const finalizeCacheEntryMock = jest
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
.mockReturnValue(Promise.resolve({ok: true, entryId: cacheId.toString()}))
const expectedCacheId = await saveCache([paths], key)
const archiveFolder = '/foo/bar'
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
expect(saveCacheMock).toHaveBeenCalledWith(
-1,
archiveFile,
signedUploadURL,
options
)
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
)
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
key,
version: cacheVersion,
sizeBytes: archiveFileSize.toString()
})
expect(getCompressionMock).toHaveBeenCalledTimes(1)
expect(expectedCacheId).toBe(cacheId)
})
test('save with non existing path should not save cache using v2 saveCache', async () => {
const path = 'node_modules'
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async () => {
return []
})
await expect(saveCache([path], key)).rejects.toThrowError(
`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
)
})
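The UploadOptions object repeated in these tests mirrors what a consumer could pass to saveCache. A minimal sketch (illustrative only; the chunk size and concurrency are the same assumed values the tests use, not required defaults, and archiveSizeBytes is filled in by the library itself):

import {saveCache} from '@actions/cache'

async function saveNodeModules(): Promise<number> {
  const paths = ['node_modules']
  const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'

  // -1 means the cache was not saved (entry reservation failed, the archive
  // was over the size limit, or finalization lost the race to another job).
  const cacheId = await saveCache(paths, key, {
    useAzureSdk: true,
    uploadChunkSize: 64 * 1024 * 1024,
    uploadConcurrency: 8
  })

  if (cacheId === -1) {
    console.log(`Cache not saved for key: ${key}`)
  }
  return cacheId
}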


@@ -0,0 +1,58 @@
import * as uploadUtils from '../src/internal/uploadUtils'
import {TransferProgressEvent} from '@azure/ms-rest-js'
test('upload progress tracked correctly', () => {
const progress = new uploadUtils.UploadProgress(1000)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(0)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(0)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 0} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(0)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(0)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 250} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(250)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(250)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 500} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(500)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(500)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 750} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(750)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(750)
expect(progress.isDone()).toBe(false)
progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent)
expect(progress.contentLength).toBe(1000)
expect(progress.sentBytes).toBe(1000)
expect(progress.displayedComplete).toBe(false)
expect(progress.timeoutHandle).toBeUndefined()
expect(progress.getTransferredBytes()).toBe(1000)
expect(progress.isDone()).toBe(true)
})
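The test above only drives the onProgress() callback by hand. In real use the same callback is meant for the Azure SDK upload call; a small sketch of that wiring (assuming BlockBlobClient.uploadFile from @azure/storage-blob, which accepts an onProgress handler reporting loadedBytes):

import * as fs from 'fs'
import {BlockBlobClient} from '@azure/storage-blob'
import {UploadProgress} from '../src/internal/uploadUtils'

async function uploadWithProgress(
  client: BlockBlobClient,
  archivePath: string
): Promise<void> {
  const contentLength = fs.statSync(archivePath).size
  const progress = new UploadProgress(contentLength)

  // The SDK calls onProgress with {loadedBytes}; UploadProgress turns that
  // into the sentBytes / isDone() state asserted in the test above.
  await client.uploadFile(archivePath, {
    onProgress: progress.onProgress()
  })

  console.log(`Uploaded ${progress.getTransferredBytes()} of ${contentLength} bytes`)
}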

packages/cache/package-lock.json (generated)

@@ -1,15 +1,15 @@
{
"name": "@actions/cache",
-"version": "3.2.4",
+"version": "4.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@actions/cache",
-"version": "3.2.4",
+"version": "4.0.0",
"license": "MIT",
"dependencies": {
-"@actions/core": "^1.10.0",
+"@actions/core": "^1.11.1",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^2.1.1",
@@ -17,7 +17,9 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
-"semver": "^6.3.1"
+"@protobuf-ts/plugin": "^2.9.4",
+"semver": "^6.3.1",
+"twirp-ts": "^2.5.0"
},
"devDependencies": {
"@types/semver": "^6.0.0",
@@ -25,20 +27,12 @@
}
},
"node_modules/@actions/core": {
-"version": "1.10.0",
-"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+"version": "1.11.1",
+"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+"integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
"dependencies": {
-"@actions/http-client": "^2.0.1",
-"uuid": "^8.3.2"
+"@actions/exec": "^1.1.1",
+"@actions/http-client": "^2.0.1"
}
},
-"node_modules/@actions/core/node_modules/uuid": {
-"version": "8.3.2",
-"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
-"bin": {
-"uuid": "dist/bin/uuid"
-}
-},
"node_modules/@actions/exec": {
@@ -253,6 +247,83 @@
"node": ">=8.0.0"
}
},
"node_modules/@protobuf-ts/plugin": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz",
"integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==",
"license": "Apache-2.0",
"dependencies": {
"@protobuf-ts/plugin-framework": "^2.9.4",
"@protobuf-ts/protoc": "^2.9.4",
"@protobuf-ts/runtime": "^2.9.4",
"@protobuf-ts/runtime-rpc": "^2.9.4",
"typescript": "^3.9"
},
"bin": {
"protoc-gen-dump": "bin/protoc-gen-dump",
"protoc-gen-ts": "bin/protoc-gen-ts"
}
},
"node_modules/@protobuf-ts/plugin-framework": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz",
"integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==",
"license": "(Apache-2.0 AND BSD-3-Clause)",
"dependencies": {
"@protobuf-ts/runtime": "^2.9.4",
"typescript": "^3.9"
}
},
"node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==",
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=4.2.0"
}
},
"node_modules/@protobuf-ts/plugin/node_modules/typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==",
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=4.2.0"
}
},
"node_modules/@protobuf-ts/protoc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz",
"integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ==",
"license": "Apache-2.0",
"bin": {
"protoc": "protoc.js"
}
},
"node_modules/@protobuf-ts/runtime": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz",
"integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg==",
"license": "(Apache-2.0 AND BSD-3-Clause)"
},
"node_modules/@protobuf-ts/runtime-rpc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz",
"integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==",
"license": "Apache-2.0",
"dependencies": {
"@protobuf-ts/runtime": "^2.9.4"
}
},
"node_modules/@types/node": {
"version": "20.4.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz",
@@ -324,6 +395,16 @@
"concat-map": "0.0.1"
}
},
"node_modules/camel-case": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz",
"integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==",
"license": "MIT",
"dependencies": {
"pascal-case": "^3.1.2",
"tslib": "^2.0.3"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
@@ -335,6 +416,15 @@
"node": ">= 0.8"
}
},
"node_modules/commander": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz",
"integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==",
"license": "MIT",
"engines": {
"node": ">= 6"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -348,6 +438,19 @@
"node": ">=0.4.0"
}
},
"node_modules/dot-object": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz",
"integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==",
"license": "MIT",
"dependencies": {
"commander": "^6.1.0",
"glob": "^7.1.6"
},
"bin": {
"dot-object": "bin/dot-object"
}
},
"node_modules/event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
@@ -377,6 +480,65 @@
"node": ">= 0.12"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
"license": "ISC"
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
"license": "ISC",
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"license": "ISC"
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"license": "MIT"
},
"node_modules/lower-case": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
"integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==",
"license": "MIT",
"dependencies": {
"tslib": "^2.0.3"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
@@ -407,6 +569,16 @@
"node": "*"
}
},
"node_modules/no-case": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz",
"integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==",
"license": "MIT",
"dependencies": {
"lower-case": "^2.0.2",
"tslib": "^2.0.3"
}
},
"node_modules/node-fetch": {
"version": "2.6.12",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz",
@@ -426,6 +598,55 @@
}
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"license": "ISC",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/pascal-case": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz",
"integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==",
"license": "MIT",
"dependencies": {
"no-case": "^3.0.4",
"tslib": "^2.0.3"
}
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/path-to-regexp": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz",
"integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==",
"license": "MIT"
},
"node_modules/prettier": {
"version": "2.8.8",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz",
"integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==",
"license": "MIT",
"bin": {
"prettier": "bin-prettier.js"
},
"engines": {
"node": ">=10.13.0"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
},
"node_modules/process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
@@ -452,6 +673,16 @@
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"node_modules/ts-poet": {
"version": "4.15.0",
"resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz",
"integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==",
"license": "Apache-2.0",
"dependencies": {
"lodash": "^4.17.15",
"prettier": "^2.5.1"
}
},
"node_modules/tslib": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz",
@@ -465,6 +696,35 @@
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
}
},
"node_modules/twirp-ts": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz",
"integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==",
"license": "MIT",
"dependencies": {
"@protobuf-ts/plugin-framework": "^2.0.7",
"camel-case": "^4.1.2",
"dot-object": "^2.1.4",
"path-to-regexp": "^6.2.0",
"ts-poet": "^4.5.0",
"yaml": "^1.10.2"
},
"bin": {
"protoc-gen-twirp_ts": "protoc-gen-twirp_ts"
},
"peerDependencies": {
"@protobuf-ts/plugin": "^2.5.0",
"ts-proto": "^1.81.3"
},
"peerDependenciesMeta": {
"@protobuf-ts/plugin": {
"optional": true
},
"ts-proto": {
"optional": true
}
}
},
"node_modules/typescript": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
@@ -492,6 +752,12 @@
"webidl-conversions": "^3.0.0"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC"
},
"node_modules/xml2js": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
@@ -511,23 +777,25 @@
"engines": {
"node": ">=4.0"
}
},
"node_modules/yaml": {
"version": "1.10.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
"license": "ISC",
"engines": {
"node": ">= 6"
}
}
},
"dependencies": {
"@actions/core": {
-"version": "1.10.0",
-"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+"version": "1.11.1",
+"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+"integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
"requires": {
-"@actions/http-client": "^2.0.1",
-"uuid": "^8.3.2"
+"@actions/exec": "^1.1.1",
+"@actions/http-client": "^2.0.1"
-},
-"dependencies": {
-"uuid": {
-"version": "8.3.2",
-"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
-}
}
},
"@actions/exec": {
@@ -707,6 +975,59 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz",
"integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA=="
},
"@protobuf-ts/plugin": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.9.4.tgz",
"integrity": "sha512-Db5Laq5T3mc6ERZvhIhkj1rn57/p8gbWiCKxQWbZBBl20wMuqKoHbRw4tuD7FyXi+IkwTToaNVXymv5CY3E8Rw==",
"requires": {
"@protobuf-ts/plugin-framework": "^2.9.4",
"@protobuf-ts/protoc": "^2.9.4",
"@protobuf-ts/runtime": "^2.9.4",
"@protobuf-ts/runtime-rpc": "^2.9.4",
"typescript": "^3.9"
},
"dependencies": {
"typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q=="
}
}
},
"@protobuf-ts/plugin-framework": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.9.4.tgz",
"integrity": "sha512-9nuX1kjdMliv+Pes8dQCKyVhjKgNNfwxVHg+tx3fLXSfZZRcUHMc1PMwB9/vTvc6gBKt9QGz5ERqSqZc0++E9A==",
"requires": {
"@protobuf-ts/runtime": "^2.9.4",
"typescript": "^3.9"
},
"dependencies": {
"typescript": {
"version": "3.9.10",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q=="
}
}
},
"@protobuf-ts/protoc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.9.4.tgz",
"integrity": "sha512-hQX+nOhFtrA+YdAXsXEDrLoGJqXHpgv4+BueYF0S9hy/Jq0VRTVlJS1Etmf4qlMt/WdigEes5LOd/LDzui4GIQ=="
},
"@protobuf-ts/runtime": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.9.4.tgz",
"integrity": "sha512-vHRFWtJJB/SiogWDF0ypoKfRIZ41Kq+G9cEFj6Qm1eQaAhJ1LDFvgZ7Ja4tb3iLOQhz0PaoPnnOijF1qmEqTxg=="
},
"@protobuf-ts/runtime-rpc": {
"version": "2.9.4",
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.9.4.tgz",
"integrity": "sha512-y9L9JgnZxXFqH5vD4d7j9duWvIJ7AShyBRoNKJGhu9Q27qIbchfzli66H9RvrQNIFk5ER7z1Twe059WZGqERcA==",
"requires": {
"@protobuf-ts/runtime": "^2.9.4"
}
},
"@types/node": {
"version": "20.4.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.6.tgz",
@@ -774,6 +1095,15 @@
"concat-map": "0.0.1"
}
},
"camel-case": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz",
"integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==",
"requires": {
"pascal-case": "^3.1.2",
"tslib": "^2.0.3"
}
},
"combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
@@ -782,6 +1112,11 @@
"delayed-stream": "~1.0.0"
}
},
"commander": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz",
"integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA=="
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -792,6 +1127,15 @@
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
},
"dot-object": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/dot-object/-/dot-object-2.1.5.tgz",
"integrity": "sha512-xHF8EP4XH/Ba9fvAF2LDd5O3IITVolerVV6xvkxoM8zlGEiCUrggpAnHyOoKJKCrhvPcGATFAUwIujj7bRG5UA==",
"requires": {
"commander": "^6.1.0",
"glob": "^7.1.6"
}
},
"event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
@@ -812,6 +1156,51 @@
"mime-types": "^2.1.12"
}
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
},
"glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"lower-case": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
"integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==",
"requires": {
"tslib": "^2.0.3"
}
},
"mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
@@ -833,6 +1222,15 @@
"brace-expansion": "^1.1.7"
}
},
"no-case": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz",
"integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==",
"requires": {
"lower-case": "^2.0.2",
"tslib": "^2.0.3"
}
},
"node-fetch": {
"version": "2.6.12",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz",
@@ -841,6 +1239,38 @@
"whatwg-url": "^5.0.0"
}
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"requires": {
"wrappy": "1"
}
},
"pascal-case": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz",
"integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==",
"requires": {
"no-case": "^3.0.4",
"tslib": "^2.0.3"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="
},
"path-to-regexp": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz",
"integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ=="
},
"prettier": {
"version": "2.8.8",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz",
"integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q=="
},
"process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
@@ -861,6 +1291,15 @@
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"ts-poet": {
"version": "4.15.0",
"resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-4.15.0.tgz",
"integrity": "sha512-sLLR8yQBvHzi9d4R1F4pd+AzQxBfzOSSjfxiJxQhkUoH5bL7RsAC6wgvtVUQdGqiCsyS9rT6/8X2FI7ipdir5g==",
"requires": {
"lodash": "^4.17.15",
"prettier": "^2.5.1"
}
},
"tslib": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz",
@@ -871,6 +1310,19 @@
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"twirp-ts": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/twirp-ts/-/twirp-ts-2.5.0.tgz",
"integrity": "sha512-JTKIK5Pf/+3qCrmYDFlqcPPUx+ohEWKBaZy8GL8TmvV2VvC0SXVyNYILO39+GCRbqnuP6hBIF+BVr8ZxRz+6fw==",
"requires": {
"@protobuf-ts/plugin-framework": "^2.0.7",
"camel-case": "^4.1.2",
"dot-object": "^2.1.4",
"path-to-regexp": "^6.2.0",
"ts-poet": "^4.5.0",
"yaml": "^1.10.2"
}
},
"typescript": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
@@ -891,6 +1343,11 @@
"webidl-conversions": "^3.0.0"
}
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"xml2js": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
@@ -904,6 +1361,11 @@
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="
},
"yaml": {
"version": "1.10.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="
}
}
}


@@ -1,6 +1,6 @@
{
"name": "@actions/cache",
-"version": "3.2.4",
+"version": "4.0.0",
"preview": true,
"description": "Actions cache lib",
"keywords": [
@@ -37,7 +37,7 @@
"url": "https://github.com/actions/toolkit/issues"
},
"dependencies": {
-"@actions/core": "^1.10.0",
+"@actions/core": "^1.11.1",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^2.1.1",
@@ -45,10 +45,12 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
-"semver": "^6.3.1"
+"@protobuf-ts/plugin": "^2.9.4",
+"semver": "^6.3.1",
+"twirp-ts": "^2.5.0"
},
"devDependencies": {
"@types/semver": "^6.0.0",
"typescript": "^5.2.2"
}
}


@@ -2,9 +2,17 @@ import * as core from '@actions/core'
import * as path from 'path'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
-import {createTar, extractTar, listTar} from './internal/tar'
+import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
+import {getCacheServiceVersion, isGhes} from './internal/config'
import {DownloadOptions, UploadOptions} from './options'
+import {createTar, extractTar, listTar} from './internal/tar'
+import {
+CreateCacheEntryRequest,
+FinalizeCacheEntryUploadRequest,
+FinalizeCacheEntryUploadResponse,
+GetCacheEntryDownloadURLRequest
+} from './generated/results/api/v1/cache'
+import {CacheFileSizeLimit} from './internal/constants'
export class ValidationError extends Error {
constructor(message: string) {
super(message)
@@ -48,7 +56,6 @@ function checkKey(key: string): void {
*
* @returns boolean return true if Actions cache service feature is available, otherwise false
*/
export function isFeatureAvailable(): boolean {
return !!process.env['ACTIONS_CACHE_URL']
}
@@ -57,8 +64,8 @@ export function isFeatureAvailable(): boolean {
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
-* @param primaryKey an explicit key for restoring the cache
-* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
+* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
@@ -70,8 +77,49 @@ export async function restoreCache(
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
+const cacheServiceVersion: string = getCacheServiceVersion()
+core.debug(`Cache service version: ${cacheServiceVersion}`)
checkPaths(paths)
switch (cacheServiceVersion) {
case 'v2':
return await restoreCacheV2(
paths,
primaryKey,
restoreKeys,
options,
enableCrossOsArchive
)
case 'v1':
default:
return await restoreCacheV1(
paths,
primaryKey,
restoreKeys,
options,
enableCrossOsArchive
)
}
}
/**
* Restores cache using the legacy Cache Service
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
async function restoreCacheV1(
paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]
@@ -153,6 +201,121 @@ export async function restoreCache(
return undefined
}
/**
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
async function restoreCacheV2(
paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
// Override UploadOptions to force the use of Azure
options = {
...options,
useAzureSdk: true
}
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]
core.debug('Resolved Keys:')
core.debug(JSON.stringify(keys))
if (keys.length > 10) {
throw new ValidationError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
}
for (const key of keys) {
checkKey(key)
}
let archivePath = ''
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
const compressionMethod = await utils.getCompressionMethod()
const request: GetCacheEntryDownloadURLRequest = {
key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive
)
}
const response = await twirpClient.GetCacheEntryDownloadURL(request)
if (!response.ok) {
core.warning(`Cache not found for keys: ${keys.join(', ')}`)
return undefined
}
core.info(`Cache hit for: ${request.key}`)
if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return response.matchedKey
}
archivePath = path.join(
await utils.createTempDirectory(),
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive path: ${archivePath}`)
core.debug(`Starting download of archive to: ${archivePath}`)
await cacheHttpClient.downloadCache(
response.signedDownloadUrl,
archivePath,
options
)
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
await extractTar(archivePath, compressionMethod)
core.info('Cache restored successfully')
return response.matchedKey
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
throw error
} else {
// Suppress all non-validation cache related errors because caching should be optional
core.warning(`Failed to restore: ${(error as Error).message}`)
}
} finally {
try {
if (archivePath) {
await utils.unlinkFile(archivePath)
}
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
return undefined
}
/**
* Saves a list of files with the specified key
*
@ -168,9 +331,34 @@ export async function saveCache(
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
const cacheServiceVersion: string = getCacheServiceVersion()
core.debug(`Cache service version: ${cacheServiceVersion}`)
checkPaths(paths)
checkKey(key)
switch (cacheServiceVersion) {
case 'v2':
return await saveCacheV2(paths, key, options, enableCrossOsArchive)
case 'v1':
default:
return await saveCacheV1(paths, key, options, enableCrossOsArchive)
}
}
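// A minimal usage sketch (not part of this diff; the key and paths are
// hypothetical placeholders). As with restore, the version switch above is
// transparent to callers:
//
//   const cacheId = await saveCache(['node_modules'], 'npm-deps-abc123')
//   // cacheId stays -1 when the save is skipped or fails non-fatally.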
/**
* Save cache using the legacy Cache Service
*
* @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on Windows which could be restored on any platform
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
async function saveCacheV1(
paths: string[],
key: string,
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
const compressionMethod = await utils.getCompressionMethod()
let cacheId = -1
@ -202,7 +390,7 @@ export async function saveCache(
core.debug(`File Size: ${archiveFileSize}`)
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > fileSizeLimit && !isGhes()) {
throw new Error(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
@ -237,7 +425,138 @@ export async function saveCache(
}
core.debug(`Saving Cache (ID: ${cacheId})`)
await cacheHttpClient.saveCache(cacheId, archivePath, '', options)
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {
throw error
} else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`)
} else {
core.warning(`Failed to save: ${typedError.message}`)
}
} finally {
// Try to delete the archive to save space
try {
await utils.unlinkFile(archivePath)
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
return cacheId
}
/**
* Save cache using Cache Service v2
*
* @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabled to save cache on Windows which could be restored on any platform
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
async function saveCacheV2(
paths: string[],
key: string,
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
// Override UploadOptions to force the use of Azure
// ...options goes first because we want to override the default values
// set in UploadOptions with these specific figures
options = {
...options,
uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
uploadConcurrency: 8, // 8 workers for parallel upload
useAzureSdk: true
}
const compressionMethod = await utils.getCompressionMethod()
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
let cacheId = -1
const cachePaths = await utils.resolvePaths(paths)
core.debug('Cache Paths:')
core.debug(`${JSON.stringify(cachePaths)}`)
if (cachePaths.length === 0) {
throw new Error(
`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
)
}
const archiveFolder = await utils.createTempDirectory()
const archivePath = path.join(
archiveFolder,
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive Path: ${archivePath}`)
try {
await createTar(archiveFolder, cachePaths, compressionMethod)
if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.debug(`File Size: ${archiveFileSize}`)
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > CacheFileSizeLimit && !isGhes()) {
throw new Error(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`
)
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize
core.debug('Reserving Cache')
const version = utils.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive
)
const request: CreateCacheEntryRequest = {
key,
version
}
const response = await twirpClient.CreateCacheEntry(request)
if (!response.ok) {
throw new ReserveCacheError(
`Unable to reserve cache with key ${key}, another job may be creating this cache.`
)
}
core.debug(`Attempting to upload cache located at: ${archivePath}`)
await cacheHttpClient.saveCache(
cacheId,
archivePath,
response.signedUploadUrl,
options
)
const finalizeRequest: FinalizeCacheEntryUploadRequest = {
key,
version,
sizeBytes: `${archiveFileSize}`
}
const finalizeResponse: FinalizeCacheEntryUploadResponse =
await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`)
if (!finalizeResponse.ok) {
throw new Error(
`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
)
}
cacheId = parseInt(finalizeResponse.entryId)
} catch (error) {
const typedError = error as Error
if (typedError.name === ValidationError.name) {

View File

@ -0,0 +1,290 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { typeofJsonValue } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { PbLong } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone or local
* calendar, encoded as a count of seconds and fractions of seconds at
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
* January 1, 1970, in the proleptic Gregorian calendar which extends the
* Gregorian calendar backwards to year one.
*
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
* second table is needed for interpretation, using a [24-hour linear
* smear](https://developers.google.com/time/smear).
*
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
* restricting to that range, we ensure that we can convert to and from [RFC
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from Java `Instant.now()`.
*
* Instant now = Instant.now();
*
* Timestamp timestamp =
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
* .setNanos(now.getNano()).build();
*
*
* Example 6: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends MessageType<Timestamp> {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp {
const msg = this.create();
const ms = Date.now();
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date {
return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp {
const msg = this.create();
const ms = date.getTime();
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue {
let ms = PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value?: PartialMessage<Timestamp>): Timestamp {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Timestamp>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export const Timestamp = new Timestamp$Type();
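// A short usage sketch (not part of the generated file): the helper methods
// defined on Timestamp$Type convert between Timestamp messages and JS Dates.
//
//   const ts = Timestamp.now()            // seconds as a decimal string, nanos as a number
//   const asDate = Timestamp.toDate(ts)   // back to a JavaScript Date
//   const roundTrip = Timestamp.fromDate(asDate)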

View File

@ -0,0 +1,753 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.
//
import { ScalarType } from "@protobuf-ts/runtime";
import { LongType } from "@protobuf-ts/runtime";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends MessageType<DoubleValue> {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<DoubleValue>): DoubleValue {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<DoubleValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export const DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends MessageType<FloatValue> {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<FloatValue>): FloatValue {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FloatValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export const FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends MessageType<Int64Value> {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
return target;
}
create(value?: PartialMessage<Int64Value>): Int64Value {
const message = { value: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Int64Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export const Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends MessageType<UInt64Value> {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
return target;
}
create(value?: PartialMessage<UInt64Value>): UInt64Value {
const message = { value: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<UInt64Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint64 value */ 1:
message.value = reader.uint64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* uint64 value = 1; */
if (message.value !== "0")
writer.tag(1, WireType.Varint).uint64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export const UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends MessageType<Int32Value> {
constructor() {
super("google.protobuf.Int32Value", [
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Encode `Int32Value` to JSON string.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
}
/**
* Decode `Int32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<Int32Value>): Int32Value {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Int32Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int32 value */ 1:
message.value = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int32 value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Varint).int32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export const Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends MessageType<UInt32Value> {
constructor() {
super("google.protobuf.UInt32Value", [
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
]);
}
/**
* Encode `UInt32Value` to JSON string.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
}
/**
* Decode `UInt32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<UInt32Value>): UInt32Value {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<UInt32Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint32 value */ 1:
message.value = reader.uint32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* uint32 value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Varint).uint32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export const UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends MessageType<BoolValue> {
constructor() {
super("google.protobuf.BoolValue", [
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue {
return message.value;
}
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean;
return target;
}
create(value?: PartialMessage<BoolValue>): BoolValue {
const message = { value: false };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<BoolValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool value */ 1:
message.value = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool value = 1; */
if (message.value !== false)
writer.tag(1, WireType.Varint).bool(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export const BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends MessageType<StringValue> {
constructor() {
super("google.protobuf.StringValue", [
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue {
return message.value;
}
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string;
return target;
}
create(value?: PartialMessage<StringValue>): StringValue {
const message = { value: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<StringValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string value */ 1:
message.value = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string value = 1; */
if (message.value !== "")
writer.tag(1, WireType.LengthDelimited).string(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export const StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends MessageType<BytesValue> {
constructor() {
super("google.protobuf.BytesValue", [
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
}
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array;
return target;
}
create(value?: PartialMessage<BytesValue>): BytesValue {
const message = { value: new Uint8Array(0) };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<BytesValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bytes value */ 1:
message.value = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bytes value = 1; */
if (message.value.length)
writer.tag(1, WireType.LengthDelimited).bytes(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export const BytesValue = new BytesValue$Type();
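// A short usage sketch (not part of the generated file): wrapper messages carry
// a single `value` field so that "unset" can be distinguished from the scalar
// default once embedded in a larger message. With the long_type_string option,
// 64-bit values surface as strings.
//
//   const size = Int64Value.create({ value: '1024' })
//   const wire = Int64Value.toBinary(size)    // standard protobuf-ts codec
//   const back = Int64Value.fromBinary(wire)  // back.value === '1024'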

View File

@ -0,0 +1,521 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export interface CreateCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 3;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export interface CreateCacheEntryResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to upload the cache archive
*
* @generated from protobuf field: string signed_upload_url = 2;
*/
signedUploadUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export interface FinalizeCacheEntryUploadRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Size of the cache archive in Bytes
*
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export interface FinalizeCacheEntryUploadResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* Cache entry database ID
*
* @generated from protobuf field: int64 entry_id = 2;
*/
entryId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export interface GetCacheEntryDownloadURLRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export interface GetCacheEntryDownloadURLResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to download the cache archive
*
* @generated from protobuf field: string signed_download_url = 2;
*/
signedDownloadUrl: string;
/**
* Key or restore key that matches the lookup
*
* @generated from protobuf field: string matched_key = 3;
*/
matchedKey: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest {
const message = { key: "", version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CreateCacheEntryRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* string version */ 3:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* string version = 3; */
if (message.version !== "")
writer.tag(3, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export const CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
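// A short usage sketch (not part of the generated file; key and version are
// hypothetical): request messages round-trip through the generated codecs.
//
//   const req = CreateCacheEntryRequest.create({ key: 'npm-deps-abc123', version: 'deadbeef' })
//   const wire = CreateCacheEntryRequest.toBinary(req)
//   const back = CreateCacheEntryRequest.fromBinary(wire)   // back.key === req.key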
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CreateCacheEntryResponse>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest {
const message = { key: "", sizeBytes: "0", version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FinalizeCacheEntryUploadRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(3, WireType.Varint).int64(message.sizeBytes);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export const FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse {
const message = { ok: false, entryId: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FinalizeCacheEntryUploadResponse>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, WireType.Varint).bool(message.ok);
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, WireType.Varint).int64(message.entryId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export const FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest {
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<GetCacheEntryDownloadURLRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export const GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse {
const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<GetCacheEntryDownloadURLResponse>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_download_url */ 2:
message.signedDownloadUrl = reader.string();
break;
case /* string matched_key */ 3:
message.matchedKey = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, WireType.Varint).bool(message.ok);
/* string signed_download_url = 2; */
if (message.signedDownloadUrl !== "")
writer.tag(2, WireType.LengthDelimited).string(message.signedDownloadUrl);
/* string matched_key = 3; */
if (message.matchedKey !== "")
writer.tag(3, WireType.LengthDelimited).string(message.matchedKey);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
export const CacheService = new ServiceType("github.actions.results.api.v1.CacheService", [
{ name: "CreateCacheEntry", options: {}, I: CreateCacheEntryRequest, O: CreateCacheEntryResponse },
{ name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse },
{ name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse }
]);
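// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated file): the MessageType instances
// above expose create/toJson/toBinary helpers from @protobuf-ts/runtime. The
// key, restore keys and version below are illustrative placeholders only.
// ---------------------------------------------------------------------------
const exampleRequest = GetCacheEntryDownloadURLRequest.create({
    key: "node-cache-linux-x64",        // primary cache key
    restoreKeys: ["node-cache-linux-"], // fallback key prefixes
    version: "example-cache-version"    // normally a SHA-256 digest of the cached paths
});
// JSON body as sent by the Twirp JSON client (proto field names, defaults omitted)
const exampleJsonBody = GetCacheEntryDownloadURLRequest.toJson(exampleRequest, {
    useProtoFieldName: true,
    emitDefaultValues: false
});
// Binary body as sent by a protobuf client
const exampleBinaryBody = GetCacheEntryDownloadURLRequest.toBinary(exampleRequest);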

View File

@ -0,0 +1,642 @@
import {
TwirpContext,
TwirpServer,
RouterEvents,
TwirpError,
TwirpErrorCode,
Interceptor,
TwirpContentType,
chainInterceptors,
} from "twirp-ts";
import {
CreateCacheEntryRequest,
CreateCacheEntryResponse,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse,
} from "./cache";
//==================================//
// Client Code //
//==================================//
interface Rpc {
request(
service: string,
method: string,
contentType: "application/json" | "application/protobuf",
data: object | Uint8Array
): Promise<object | Uint8Array>;
}
export interface CacheServiceClient {
CreateCacheEntry(
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse>;
}
export class CacheServiceClientJSON implements CacheServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateCacheEntry.bind(this);
this.FinalizeCacheEntryUpload.bind(this);
this.GetCacheEntryDownloadURL.bind(this);
}
CreateCacheEntry(
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse> {
const data = CreateCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"CreateCacheEntry",
"application/json",
data as object
);
return promise.then((data) =>
CreateCacheEntryResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
FinalizeCacheEntryUpload(
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse> {
const data = FinalizeCacheEntryUploadRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"FinalizeCacheEntryUpload",
"application/json",
data as object
);
return promise.then((data) =>
FinalizeCacheEntryUploadResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
GetCacheEntryDownloadURL(
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse> {
const data = GetCacheEntryDownloadURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"GetCacheEntryDownloadURL",
"application/json",
data as object
);
return promise.then((data) =>
GetCacheEntryDownloadURLResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
}
export class CacheServiceClientProtobuf implements CacheServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
this.CreateCacheEntry.bind(this);
this.FinalizeCacheEntryUpload.bind(this);
this.GetCacheEntryDownloadURL.bind(this);
}
CreateCacheEntry(
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse> {
const data = CreateCacheEntryRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"CreateCacheEntry",
"application/protobuf",
data
);
return promise.then((data) =>
CreateCacheEntryResponse.fromBinary(data as Uint8Array)
);
}
FinalizeCacheEntryUpload(
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse> {
const data = FinalizeCacheEntryUploadRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"FinalizeCacheEntryUpload",
"application/protobuf",
data
);
return promise.then((data) =>
FinalizeCacheEntryUploadResponse.fromBinary(data as Uint8Array)
);
}
GetCacheEntryDownloadURL(
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse> {
const data = GetCacheEntryDownloadURLRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.CacheService",
"GetCacheEntryDownloadURL",
"application/protobuf",
data
);
return promise.then((data) =>
GetCacheEntryDownloadURLResponse.fromBinary(data as Uint8Array)
);
}
}
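// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated file): wiring a minimal Rpc
// implementation into CacheServiceClientJSON. The fetch-based transport and
// the /twirp URL prefix are assumptions for illustration only; the real
// transport used by this package adds auth and retries (see
// cacheTwirpClient.ts further below).
// ---------------------------------------------------------------------------
class FetchRpc implements Rpc {
  constructor(private readonly baseUrl: string) {}
  async request(
    service: string,
    method: string,
    contentType: "application/json" | "application/protobuf",
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const res = await fetch(`${this.baseUrl}/twirp/${service}/${method}`, {
      method: "POST",
      headers: { "Content-Type": contentType },
      body:
        contentType === "application/json"
          ? JSON.stringify(data)
          : (data as Uint8Array),
    });
    return contentType === "application/json"
      ? ((await res.json()) as object)
      : new Uint8Array(await res.arrayBuffer());
  }
}
// const client = new CacheServiceClientJSON(new FetchRpc("https://results.example"));
// const entry = await client.GetCacheEntryDownloadURL(
//   GetCacheEntryDownloadURLRequest.create({ key: "node-cache", version: "v1" })
// );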
//==================================//
// Server Code //
//==================================//
export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
CreateCacheEntry(
ctx: T,
request: CreateCacheEntryRequest
): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(
ctx: T,
request: FinalizeCacheEntryUploadRequest
): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(
ctx: T,
request: GetCacheEntryDownloadURLRequest
): Promise<GetCacheEntryDownloadURLResponse>;
}
export enum CacheServiceMethod {
CreateCacheEntry = "CreateCacheEntry",
FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
}
export const CacheServiceMethodList = [
CacheServiceMethod.CreateCacheEntry,
CacheServiceMethod.FinalizeCacheEntryUpload,
CacheServiceMethod.GetCacheEntryDownloadURL,
];
export function createCacheServiceServer<T extends TwirpContext = TwirpContext>(
service: CacheServiceTwirp<T>
) {
return new TwirpServer<CacheServiceTwirp, T>({
service,
packageName: "github.actions.results.api.v1",
serviceName: "CacheService",
methodList: CacheServiceMethodList,
matchRoute: matchCacheServiceRoute,
});
}
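// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated file): a toy in-memory
// CacheServiceTwirp implementation handed to createCacheServiceServer. The
// response fields follow the v1 messages; the URLs are placeholders, not a
// real cache backend.
// ---------------------------------------------------------------------------
const toyCacheService: CacheServiceTwirp = {
  async CreateCacheEntry(_ctx, req) {
    return CreateCacheEntryResponse.create({
      ok: true,
      signedUploadUrl: `https://blob.example/${encodeURIComponent(req.key)}`,
    });
  },
  async FinalizeCacheEntryUpload(_ctx, _req) {
    return FinalizeCacheEntryUploadResponse.create({ ok: true, entryId: "1" });
  },
  async GetCacheEntryDownloadURL(_ctx, req) {
    return GetCacheEntryDownloadURLResponse.create({
      ok: true,
      signedDownloadUrl: `https://blob.example/${encodeURIComponent(req.key)}`,
      matchedKey: req.key,
    });
  },
};
// const server = createCacheServiceServer(toyCacheService);
// server.httpHandler() can then be mounted on a Node http.Server for local testing.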
function matchCacheServiceRoute<T extends TwirpContext = TwirpContext>(
method: string,
events: RouterEvents<T>
) {
switch (method) {
case "CreateCacheEntry":
return async (
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
CreateCacheEntryRequest,
CreateCacheEntryResponse
>[]
) => {
ctx = { ...ctx, methodName: "CreateCacheEntry" };
await events.onMatch(ctx);
return handleCacheServiceCreateCacheEntryRequest(
ctx,
service,
data,
interceptors
);
};
case "FinalizeCacheEntryUpload":
return async (
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse
>[]
) => {
ctx = { ...ctx, methodName: "FinalizeCacheEntryUpload" };
await events.onMatch(ctx);
return handleCacheServiceFinalizeCacheEntryUploadRequest(
ctx,
service,
data,
interceptors
);
};
case "GetCacheEntryDownloadURL":
return async (
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
>[]
) => {
ctx = { ...ctx, methodName: "GetCacheEntryDownloadURL" };
await events.onMatch(ctx);
return handleCacheServiceGetCacheEntryDownloadURLRequest(
ctx,
service,
data,
interceptors
);
};
default:
events.onNotFound();
const msg = `no handler found`;
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceCreateCacheEntryRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
CreateCacheEntryRequest,
CreateCacheEntryResponse
>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleCacheServiceCreateCacheEntryJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleCacheServiceCreateCacheEntryProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceFinalizeCacheEntryUploadRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse
>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleCacheServiceFinalizeCacheEntryUploadJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleCacheServiceFinalizeCacheEntryUploadProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceGetCacheEntryDownloadURLRequest<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
>[]
): Promise<string | Uint8Array> {
switch (ctx.contentType) {
case TwirpContentType.JSON:
return handleCacheServiceGetCacheEntryDownloadURLJSON<T>(
ctx,
service,
data,
interceptors
);
case TwirpContentType.Protobuf:
return handleCacheServiceGetCacheEntryDownloadURLProtobuf<T>(
ctx,
service,
data,
interceptors
);
default:
const msg = "unexpected Content-Type";
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
}
}
async function handleCacheServiceCreateCacheEntryJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
CreateCacheEntryRequest,
CreateCacheEntryResponse
>[]
) {
let request: CreateCacheEntryRequest;
let response: CreateCacheEntryResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = CreateCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
CreateCacheEntryRequest,
CreateCacheEntryResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.CreateCacheEntry(ctx, inputReq);
});
} else {
response = await service.CreateCacheEntry(ctx, request!);
}
return JSON.stringify(
CreateCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleCacheServiceFinalizeCacheEntryUploadJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse
>[]
) {
let request: FinalizeCacheEntryUploadRequest;
let response: FinalizeCacheEntryUploadResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = FinalizeCacheEntryUploadRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.FinalizeCacheEntryUpload(ctx, inputReq);
});
} else {
response = await service.FinalizeCacheEntryUpload(ctx, request!);
}
return JSON.stringify(
FinalizeCacheEntryUploadResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleCacheServiceGetCacheEntryDownloadURLJSON<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
>[]
) {
let request: GetCacheEntryDownloadURLRequest;
let response: GetCacheEntryDownloadURLResponse;
try {
const body = JSON.parse(data.toString() || "{}");
request = GetCacheEntryDownloadURLRequest.fromJson(body, {
ignoreUnknownFields: true,
});
} catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.GetCacheEntryDownloadURL(ctx, inputReq);
});
} else {
response = await service.GetCacheEntryDownloadURL(ctx, request!);
}
return JSON.stringify(
GetCacheEntryDownloadURLResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}) as string
);
}
async function handleCacheServiceCreateCacheEntryProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
CreateCacheEntryRequest,
CreateCacheEntryResponse
>[]
) {
let request: CreateCacheEntryRequest;
let response: CreateCacheEntryResponse;
try {
request = CreateCacheEntryRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
CreateCacheEntryRequest,
CreateCacheEntryResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.CreateCacheEntry(ctx, inputReq);
});
} else {
response = await service.CreateCacheEntry(ctx, request!);
}
return Buffer.from(CreateCacheEntryResponse.toBinary(response));
}
async function handleCacheServiceFinalizeCacheEntryUploadProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse
>[]
) {
let request: FinalizeCacheEntryUploadRequest;
let response: FinalizeCacheEntryUploadResponse;
try {
request = FinalizeCacheEntryUploadRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.FinalizeCacheEntryUpload(ctx, inputReq);
});
} else {
response = await service.FinalizeCacheEntryUpload(ctx, request!);
}
return Buffer.from(FinalizeCacheEntryUploadResponse.toBinary(response));
}
async function handleCacheServiceGetCacheEntryDownloadURLProtobuf<
T extends TwirpContext = TwirpContext
>(
ctx: T,
service: CacheServiceTwirp,
data: Buffer,
interceptors?: Interceptor<
T,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
>[]
) {
let request: GetCacheEntryDownloadURLRequest;
let response: GetCacheEntryDownloadURLResponse;
try {
request = GetCacheEntryDownloadURLRequest.fromBinary(data);
} catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = chainInterceptors(...interceptors) as Interceptor<
T,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
>;
response = await interceptor(ctx, request!, (ctx, inputReq) => {
return service.GetCacheEntryDownloadURL(ctx, inputReq);
});
} else {
response = await service.GetCacheEntryDownloadURL(ctx, request!);
}
return Buffer.from(GetCacheEntryDownloadURLResponse.toBinary(response));
}

View File

@ -0,0 +1,85 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cachemetadata.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheScope } from "./cachescope";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export interface CacheMetadata {
/**
* Backend repository id
*
* @generated from protobuf field: int64 repository_id = 1;
*/
repositoryId: string;
/**
* Scopes for the cache entry
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
*/
scope: CacheScope[];
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheMetadata$Type extends MessageType<CacheMetadata> {
constructor() {
super("github.actions.results.entities.v1.CacheMetadata", [
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheScope }
]);
}
create(value?: PartialMessage<CacheMetadata>): CacheMetadata {
const message = { repositoryId: "0", scope: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CacheMetadata>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 repository_id */ 1:
message.repositoryId = reader.int64().toString();
break;
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
message.scope.push(CacheScope.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 repository_id = 1; */
if (message.repositoryId !== "0")
writer.tag(1, WireType.Varint).int64(message.repositoryId);
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
for (let i = 0; i < message.scope.length; i++)
CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export const CacheMetadata = new CacheMetadata$Type();
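// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated file): a CacheMetadata value as it
// would accompany cache service v2 requests. Because the file is generated
// with long_type_string, the int64 fields repository_id and permission are
// plain strings; the ids below are illustrative.
// ---------------------------------------------------------------------------
const exampleMetadata = CacheMetadata.create({
    repositoryId: "123456789",
    scope: [
        CacheScope.create({ scope: "refs/heads/main", permission: "3" }) // Read | Write
    ]
});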

View File

@ -0,0 +1,84 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/entities/v1/cachescope.proto" (package "github.actions.results.entities.v1", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheScope
*/
export interface CacheScope {
/**
* Determines the scope of the cache entry
*
* @generated from protobuf field: string scope = 1;
*/
scope: string;
/**
* None: 0 | Read: 1 | Write: 2 | All: (1|2)
*
* @generated from protobuf field: int64 permission = 2;
*/
permission: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class CacheScope$Type extends MessageType<CacheScope> {
constructor() {
super("github.actions.results.entities.v1.CacheScope", [
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value?: PartialMessage<CacheScope>): CacheScope {
const message = { scope: "", permission: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<CacheScope>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string scope */ 1:
message.scope = reader.string();
break;
case /* int64 permission */ 2:
message.permission = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string scope = 1; */
if (message.scope !== "")
writer.tag(1, WireType.LengthDelimited).string(message.scope);
/* int64 permission = 2; */
if (message.permission !== "0")
writer.tag(2, WireType.Varint).int64(message.permission);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
export const CacheScope = new CacheScope$Type();

View File

@ -5,12 +5,10 @@ import {
   RequestOptions,
   TypedResponse
 } from '@actions/http-client/lib/interfaces'
-import * as crypto from 'crypto'
 import * as fs from 'fs'
 import {URL} from 'url'
 import * as utils from './cacheUtils'
-import {CompressionMethod} from './constants'
+import {uploadCacheArchiveSDK} from './uploadUtils'
 import {
   ArtifactCacheEntry,
   InternalCacheOptions,
@ -36,11 +34,11 @@ import {
   retryHttpClientResponse,
   retryTypedResponse
 } from './requestUtils'
-const versionSalt = '1.0'
+import {getCacheServiceURL} from './config'
+import {getUserAgentString} from './shared/user-agent'
 
 function getCacheApiUrl(resource: string): string {
-  const baseUrl: string = process.env['ACTIONS_CACHE_URL'] || ''
+  const baseUrl: string = getCacheServiceURL()
   if (!baseUrl) {
     throw new Error('Cache Service Url not found, unable to restore cache.')
   }
@ -69,48 +67,24 @@ function createHttpClient(): HttpClient {
   const bearerCredentialHandler = new BearerCredentialHandler(token)
 
   return new HttpClient(
-    'actions/cache',
+    getUserAgentString(),
     [bearerCredentialHandler],
     getRequestOptions()
   )
 }
 
-export function getCacheVersion(
-  paths: string[],
-  compressionMethod?: CompressionMethod,
-  enableCrossOsArchive = false
-): string {
-  // don't pass changes upstream
-  const components = paths.slice()
-
-  // Add compression method to cache version to restore
-  // compressed cache as per compression method
-  if (compressionMethod) {
-    components.push(compressionMethod)
-  }
-
-  // Only check for windows platforms if enableCrossOsArchive is false
-  if (process.platform === 'win32' && !enableCrossOsArchive) {
-    components.push('windows-only')
-  }
-
-  // Add salt to cache version to support breaking changes in cache entry
-  components.push(versionSalt)
-
-  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
-}
-
 export async function getCacheEntry(
   keys: string[],
   paths: string[],
   options?: InternalCacheOptions
 ): Promise<ArtifactCacheEntry | null> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(
+  const version = utils.getCacheVersion(
     paths,
     options?.compressionMethod,
     options?.enableCrossOsArchive
   )
 
   const resource = `cache?keys=${encodeURIComponent(
     keys.join(',')
   )}&version=${version}`
@ -207,7 +181,7 @@ export async function reserveCache(
   options?: InternalCacheOptions
 ): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
   const httpClient = createHttpClient()
-  const version = getCacheVersion(
+  const version = utils.getCacheVersion(
     paths,
     options?.compressionMethod,
     options?.enableCrossOsArchive
@ -353,26 +327,45 @@ async function commitCache(
 export async function saveCache(
   cacheId: number,
   archivePath: string,
+  signedUploadURL?: string,
   options?: UploadOptions
 ): Promise<void> {
-  const httpClient = createHttpClient()
+  const uploadOptions = getUploadOptions(options)
 
-  core.debug('Upload cache')
-  await uploadFile(httpClient, cacheId, archivePath, options)
+  if (uploadOptions.useAzureSdk) {
+    // Use Azure storage SDK to upload caches directly to Azure
+    if (!signedUploadURL) {
+      throw new Error(
+        'Azure Storage SDK can only be used when a signed URL is provided.'
+      )
+    }
+    await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
+  } else {
+    const httpClient = createHttpClient()
 
-  // Commit Cache
-  core.debug('Commiting cache')
-  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
-  core.info(
-    `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
-  )
+    core.debug('Upload cache')
+    await uploadFile(httpClient, cacheId, archivePath, options)
 
-  const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
-  if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
-    throw new Error(
-      `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
-    )
-  }
+    // Commit Cache
+    core.debug('Commiting cache')
+    const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
+    core.info(
+      `Cache Size: ~${Math.round(
+        cacheSize / (1024 * 1024)
+      )} MB (${cacheSize} B)`
+    )
 
-  core.info('Cache saved successfully')
+    const commitCacheResponse = await commitCache(
+      httpClient,
+      cacheId,
+      cacheSize
+    )
+    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+      throw new Error(
+        `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+      )
+    }
+
+    core.info('Cache saved successfully')
+  }
 }
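// ---------------------------------------------------------------------------
// Usage sketch: the new signedUploadURL branch of saveCache shown in the hunk
// above. The archive path, URL and sizes are illustrative; the signed URL
// would come from the cache service v2 CreateCacheEntry response.
// ---------------------------------------------------------------------------
// import * as cacheHttpClient from './cacheHttpClient'
// await cacheHttpClient.saveCache(
//   0,                                   // cacheId is unused on the Azure SDK branch
//   '/tmp/cache.tzst',
//   'https://account.blob.core.windows.net/container/entry?sig=placeholder',
//   {useAzureSdk: true, archiveSizeBytes: 42 * 1024 * 1024}
// )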

View File

@ -13,6 +13,8 @@ import {
   GnuTarPathOnWindows
 } from './constants'
 
+const versionSalt = '1.0'
+
 // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
 export async function createTempDirectory(): Promise<string> {
   const IS_WINDOWS = process.platform === 'win32'
@ -131,15 +133,35 @@ export function assertDefined<T>(name: string, value?: T): T {
   return value
 }
 
-export function isGhes(): boolean {
-  const ghUrl = new URL(
-    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
-  )
-
-  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
-  const isGitHubHost = hostname === 'GITHUB.COM'
-  const isGheHost =
-    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
-
-  return !isGitHubHost && !isGheHost
+export function getCacheVersion(
+  paths: string[],
+  compressionMethod?: CompressionMethod,
+  enableCrossOsArchive = false
+): string {
+  // don't pass changes upstream
+  const components = paths.slice()
+
+  // Add compression method to cache version to restore
+  // compressed cache as per compression method
+  if (compressionMethod) {
+    components.push(compressionMethod)
+  }
+
+  // Only check for windows platforms if enableCrossOsArchive is false
+  if (process.platform === 'win32' && !enableCrossOsArchive) {
+    components.push('windows-only')
+  }
+
+  // Add salt to cache version to support breaking changes in cache entry
+  components.push(versionSalt)
+
+  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
+}
+
+export function getRuntimeToken(): string {
+  const token = process.env['ACTIONS_RUNTIME_TOKEN']
+  if (!token) {
+    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
+  }
+  return token
 }
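// ---------------------------------------------------------------------------
// Usage sketch: calling the relocated getCacheVersion helper. On a non-Windows
// runner the call below hashes 'node_modules|~/.npm|zstd|1.0' with SHA-256;
// the paths and compression method are illustrative.
// ---------------------------------------------------------------------------
// import {CompressionMethod} from './constants'
// import * as utils from './cacheUtils'
// const version = utils.getCacheVersion(
//   ['node_modules', '~/.npm'],
//   CompressionMethod.Zstd,
//   false // enableCrossOsArchive
// )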

packages/cache/src/internal/config.ts
View File

@ -0,0 +1,39 @@
export function isGhes(): boolean {
const ghUrl = new URL(
process.env['GITHUB_SERVER_URL'] || 'https://github.com'
)
const hostname = ghUrl.hostname.trimEnd().toUpperCase()
const isGitHubHost = hostname === 'GITHUB.COM'
const isGheHost = hostname.endsWith('.GHE.COM')
const isLocalHost = hostname.endsWith('.LOCALHOST')
return !isGitHubHost && !isGheHost && !isLocalHost
}
export function getCacheServiceVersion(): string {
// Cache service v2 is not supported on GHES. We will default to
// cache service v1 even if the feature flag was enabled by user.
if (isGhes()) return 'v1'
return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'
}
export function getCacheServiceURL(): string {
const version = getCacheServiceVersion()
// Based on the version of the cache service, we will determine which
// URL to use.
switch (version) {
case 'v1':
return (
process.env['ACTIONS_CACHE_URL'] ||
process.env['ACTIONS_RESULTS_URL'] ||
''
)
case 'v2':
return process.env['ACTIONS_RESULTS_URL'] || ''
default:
throw new Error(`Unsupported cache service version: ${version}`)
}
}
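// ---------------------------------------------------------------------------
// Usage sketch (not part of this module): the environment variables that drive
// the helpers above inside a runner job. Values are illustrative.
// ---------------------------------------------------------------------------
// process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
// process.env['ACTIONS_RESULTS_URL'] = 'https://results.example/'
// getCacheServiceVersion() // => 'v2' (always 'v1' on GHES)
// getCacheServiceURL()     // => the ACTIONS_RESULTS_URL value when on v2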

View File

@ -36,3 +36,5 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S
 export const TarFilename = 'cache.tar'
 
 export const ManifestFilename = 'manifest.txt'
+
+export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository

View File

@ -0,0 +1,204 @@
import {info, debug} from '@actions/core'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'
import {getCacheServiceURL} from '../config'
import {getRuntimeToken} from '../cacheUtils'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp'
// The twirp http client must implement this interface
interface Rpc {
request(
service: string,
method: string,
contentType: 'application/json' | 'application/protobuf',
data: object | Uint8Array
): Promise<object | Uint8Array>
}
/**
* This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
*
* It adds retry logic to the request method, which is not present in the generated client.
*
* This class is used to interact with cache service v2.
*/
class CacheServiceClient implements Rpc {
private httpClient: HttpClient
private baseUrl: string
private maxAttempts = 5
private baseRetryIntervalMilliseconds = 3000
private retryMultiplier = 1.5
constructor(
userAgent: string,
maxAttempts?: number,
baseRetryIntervalMilliseconds?: number,
retryMultiplier?: number
) {
const token = getRuntimeToken()
this.baseUrl = getCacheServiceURL()
if (maxAttempts) {
this.maxAttempts = maxAttempts
}
if (baseRetryIntervalMilliseconds) {
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
}
if (retryMultiplier) {
this.retryMultiplier = retryMultiplier
}
this.httpClient = new HttpClient(userAgent, [
new BearerCredentialHandler(token)
])
}
// This function satisfies the Rpc interface. It is compatible with the
// JSON generated client.
async request(
service: string,
method: string,
contentType: 'application/json' | 'application/protobuf',
data: object | Uint8Array
): Promise<object | Uint8Array> {
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
debug(`[Request] ${method} ${url}`)
const headers = {
'Content-Type': contentType
}
try {
const {body} = await this.retryableRequest(async () =>
this.httpClient.post(url, JSON.stringify(data), headers)
)
return body
} catch (error) {
throw new Error(`Failed to ${method}: ${error.message}`)
}
}
async retryableRequest(
operation: () => Promise<HttpClientResponse>
): Promise<{response: HttpClientResponse; body: object}> {
let attempt = 0
let errorMessage = ''
let rawBody = ''
while (attempt < this.maxAttempts) {
let isRetryable = false
try {
const response = await operation()
const statusCode = response.message.statusCode
rawBody = await response.readBody()
debug(`[Response] - ${response.message.statusCode}`)
debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
const body = JSON.parse(rawBody)
debug(`Body: ${JSON.stringify(body, null, 2)}`)
if (this.isSuccessStatusCode(statusCode)) {
return {response, body}
}
isRetryable = this.isRetryableHttpStatusCode(statusCode)
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
if (body.msg) {
if (UsageError.isUsageErrorMessage(body.msg)) {
throw new UsageError()
}
errorMessage = `${errorMessage}: ${body.msg}`
}
} catch (error) {
if (error instanceof SyntaxError) {
debug(`Raw Body: ${rawBody}`)
}
if (error instanceof UsageError) {
throw error
}
if (NetworkError.isNetworkErrorCode(error?.code)) {
throw new NetworkError(error?.code)
}
isRetryable = true
errorMessage = error.message
}
if (!isRetryable) {
throw new Error(`Received non-retryable error: ${errorMessage}`)
}
if (attempt + 1 === this.maxAttempts) {
throw new Error(
`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
)
}
const retryTimeMilliseconds =
this.getExponentialRetryTimeMilliseconds(attempt)
info(
`Attempt ${attempt + 1} of ${
this.maxAttempts
} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
)
await this.sleep(retryTimeMilliseconds)
attempt++
}
throw new Error(`Request failed`)
}
isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) return false
return statusCode >= 200 && statusCode < 300
}
isRetryableHttpStatusCode(statusCode?: number): boolean {
if (!statusCode) return false
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.GatewayTimeout,
HttpCodes.InternalServerError,
HttpCodes.ServiceUnavailable,
HttpCodes.TooManyRequests
]
return retryableStatusCodes.includes(statusCode)
}
async sleep(milliseconds: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, milliseconds))
}
getExponentialRetryTimeMilliseconds(attempt: number): number {
if (attempt < 0) {
throw new Error('attempt should be a positive integer')
}
if (attempt === 0) {
return this.baseRetryIntervalMilliseconds
}
const minTime =
this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
const maxTime = minTime * this.retryMultiplier
// returns a random number between minTime and maxTime (exclusive)
return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}
}
export function internalCacheTwirpClient(options?: {
maxAttempts?: number
retryIntervalMs?: number
retryMultiplier?: number
}): CacheServiceClientJSON {
const client = new CacheServiceClient(
getUserAgentString(),
options?.maxAttempts,
options?.retryIntervalMs,
options?.retryMultiplier
)
return new CacheServiceClientJSON(client)
}
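// ---------------------------------------------------------------------------
// Usage sketch (not part of this module): calling cache service v2 through the
// retrying client. Assumes it runs inside an Actions job where
// ACTIONS_RUNTIME_TOKEN and ACTIONS_RESULTS_URL are set; key and version are
// illustrative. Retries back off roughly as 3000 ms * 1.5^attempt by default.
// ---------------------------------------------------------------------------
// const twirpClient = internalCacheTwirpClient({maxAttempts: 3, retryIntervalMs: 1000})
// const response = await twirpClient.GetCacheEntryDownloadURL({
//   key: 'node-cache-linux-x64',
//   restoreKeys: ['node-cache-linux-'],
//   version: 'example-cache-version',
//   metadata: {repositoryId: '0', scope: []}
// })
// if (response.ok) {
//   info(`Matched ${response.matchedKey}: ${response.signedDownloadUrl}`)
// }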

View File

@ -0,0 +1,72 @@
export class FilesNotFoundError extends Error {
files: string[]
constructor(files: string[] = []) {
let message = 'No files were found to upload'
if (files.length > 0) {
message += `: ${files.join(', ')}`
}
super(message)
this.files = files
this.name = 'FilesNotFoundError'
}
}
export class InvalidResponseError extends Error {
constructor(message: string) {
super(message)
this.name = 'InvalidResponseError'
}
}
export class CacheNotFoundError extends Error {
constructor(message = 'Cache not found') {
super(message)
this.name = 'CacheNotFoundError'
}
}
export class GHESNotSupportedError extends Error {
constructor(
message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.'
) {
super(message)
this.name = 'GHESNotSupportedError'
}
}
export class NetworkError extends Error {
code: string
constructor(code: string) {
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
super(message)
this.code = code
this.name = 'NetworkError'
}
static isNetworkErrorCode = (code?: string): boolean => {
if (!code) return false
return [
'ECONNRESET',
'ENOTFOUND',
'ETIMEDOUT',
'ECONNREFUSED',
'EHOSTUNREACH'
].includes(code)
}
}
export class UsageError extends Error {
constructor() {
const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
super(message)
this.name = 'UsageError'
}
static isUsageErrorMessage = (msg?: string): boolean => {
if (!msg) return false
return msg.includes('insufficient usage')
}
}
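// ---------------------------------------------------------------------------
// Usage sketch (not part of this module): how callers are expected to tell the
// typed errors above apart; restoreEntry is a hypothetical caller.
// ---------------------------------------------------------------------------
// try {
//   await restoreEntry()
// } catch (error) {
//   if (error instanceof CacheNotFoundError) {
//     // treat as a cache miss rather than a failure
//   } else if (error instanceof NetworkError || error instanceof UsageError) {
//     throw error // non-retryable; surface to the workflow
//   }
// }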

View File

@ -0,0 +1,9 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')
/**
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
*/
export function getUserAgentString(): string {
return `@actions/cache-${packageJson.version}`
}

View File

@ -0,0 +1,177 @@
import * as core from '@actions/core'
import {
BlobClient,
BlobUploadCommonResponse,
BlockBlobClient,
BlockBlobParallelUploadOptions
} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/ms-rest-js'
import {InvalidResponseError} from './shared/errors'
import {UploadOptions} from '../options'
/**
* Class for tracking the upload state and displaying stats.
*/
export class UploadProgress {
contentLength: number
sentBytes: number
startTime: number
displayedComplete: boolean
timeoutHandle?: ReturnType<typeof setTimeout>
constructor(contentLength: number) {
this.contentLength = contentLength
this.sentBytes = 0
this.displayedComplete = false
this.startTime = Date.now()
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes: number): void {
this.sentBytes = sentBytes
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes(): number {
return this.sentBytes
}
/**
* Returns true if the upload is complete.
*/
isDone(): boolean {
return this.getTransferredBytes() === this.contentLength
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display(): void {
if (this.displayedComplete) {
return
}
const transferredBytes = this.sentBytes
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(
1
)
const elapsedTime = Date.now() - this.startTime
const uploadSpeed = (
transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)
).toFixed(1)
core.info(
`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`
)
if (this.isDone()) {
this.displayedComplete = true
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress(): (progress: TransferProgressEvent) => void {
return (progress: TransferProgressEvent) => {
this.setSentBytes(progress.loadedBytes)
}
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000): void {
const displayCallback = (): void => {
this.display()
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs)
}
}
this.timeoutHandle = setTimeout(displayCallback, delayInMs)
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer(): void {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle)
this.timeoutHandle = undefined
}
this.display()
}
}
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
export async function uploadCacheArchiveSDK(
signedUploadURL: string,
archivePath: string,
options?: UploadOptions
): Promise<BlobUploadCommonResponse> {
const blobClient: BlobClient = new BlobClient(signedUploadURL)
const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
const uploadProgress = new UploadProgress(options?.archiveSizeBytes ?? 0)
// Specify data transfer options
const uploadOptions: BlockBlobParallelUploadOptions = {
blockSize: options?.uploadChunkSize,
concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers
maxSingleShotSize: 128 * 1024 * 1024, // 128 MiB initial transfer size
onProgress: uploadProgress.onProgress()
}
try {
uploadProgress.startDisplayTimer()
core.debug(
`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
)
const response = await blockBlobClient.uploadFile(
archivePath,
uploadOptions
)
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new InvalidResponseError(
`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`
)
}
return response
} catch (error) {
core.warning(
`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`
)
throw error
} finally {
uploadProgress.stopDisplayTimer()
}
}
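// ---------------------------------------------------------------------------
// Usage sketch (not part of this module): uploading an archive with the helper
// above. The signed URL would come from the cache service; the path and sizes
// are illustrative.
// ---------------------------------------------------------------------------
// const response = await uploadCacheArchiveSDK(
//   'https://account.blob.core.windows.net/container/entry?sig=placeholder',
//   '/tmp/cache.tzst',
//   {
//     archiveSizeBytes: 42 * 1024 * 1024, // lets UploadProgress report accurate percentages
//     uploadChunkSize: 32 * 1024 * 1024,
//     uploadConcurrency: 4
//   }
// )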

View File

@ -4,6 +4,14 @@ import * as core from '@actions/core'
  * Options to control cache upload
  */
 export interface UploadOptions {
+  /**
+   * Indicates whether to use the Azure Blob SDK to download caches
+   * that are stored on Azure Blob Storage to improve reliability and
+   * performance
+   *
+   * @default false
+   */
+  useAzureSdk?: boolean
   /**
    * Number of parallel cache upload
    *
@ -16,6 +24,10 @@ export interface UploadOptions {
    * @default 32MB
    */
   uploadChunkSize?: number
+  /**
+   * Archive size in bytes
+   */
+  archiveSizeBytes?: number
 }
 
 /**
@ -76,12 +88,18 @@ export interface DownloadOptions {
  * @param copy the original upload options
  */
 export function getUploadOptions(copy?: UploadOptions): UploadOptions {
+  // Defaults if not overriden
   const result: UploadOptions = {
+    useAzureSdk: false,
     uploadConcurrency: 4,
     uploadChunkSize: 32 * 1024 * 1024
   }
 
   if (copy) {
+    if (typeof copy.useAzureSdk === 'boolean') {
+      result.useAzureSdk = copy.useAzureSdk
+    }
+
     if (typeof copy.uploadConcurrency === 'number') {
       result.uploadConcurrency = copy.uploadConcurrency
     }
@ -91,6 +109,26 @@ export function getUploadOptions(copy?: UploadOptions): UploadOptions {
     }
   }
 
+  /**
+   * Add env var overrides
+   */
+  // Cap the uploadConcurrency at 32
+  result.uploadConcurrency = !isNaN(
+    Number(process.env['CACHE_UPLOAD_CONCURRENCY'])
+  )
+    ? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
+    : result.uploadConcurrency
+  // Cap the uploadChunkSize at 128MiB
+  result.uploadChunkSize = !isNaN(
+    Number(process.env['CACHE_UPLOAD_CHUNK_SIZE'])
+  )
+    ? Math.min(
+        128 * 1024 * 1024,
+        Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024
+      )
+    : result.uploadChunkSize
+
+  core.debug(`Use Azure SDK: ${result.useAzureSdk}`)
   core.debug(`Upload concurrency: ${result.uploadConcurrency}`)
   core.debug(`Upload chunk size: ${result.uploadChunkSize}`)
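// ---------------------------------------------------------------------------
// Usage sketch: effect of the env var overrides added in the hunk above;
// numbers are illustrative.
// ---------------------------------------------------------------------------
// process.env['CACHE_UPLOAD_CONCURRENCY'] = '64'  // capped to 32
// process.env['CACHE_UPLOAD_CHUNK_SIZE'] = '256'  // interpreted as MiB, capped to 128 MiB
// const opts = getUploadOptions({useAzureSdk: true})
// // => {useAzureSdk: true, uploadConcurrency: 32, uploadChunkSize: 128 * 1024 * 1024}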

View File

@ -21,7 +21,7 @@
"packages": { "packages": {
"": { "": {
"name": "@actions/glob", "name": "@actions/glob",
"version": "0.4.0", "version": "0.5.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^1.9.1", "@actions/core": "^1.9.1",

View File

@ -6,7 +6,7 @@
"packages": { "packages": {
"": { "": {
"name": "@actions/http-client", "name": "@actions/http-client",
"version": "2.2.1", "version": "2.2.3",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"tunnel": "^0.0.6", "tunnel": "^0.0.6",

View File

@ -1,5 +1,10 @@
 # @actions/tool-cache Releases
 
+### 2.0.2
+- Update `@actions/core` to v1.11.1 [#1872](https://github.com/actions/toolkit/pull/1872)
+- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
+
 ### 2.0.1
 - Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

View File

@ -1,15 +1,15 @@
 {
   "name": "@actions/tool-cache",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@actions/tool-cache",
-      "version": "2.0.1",
+      "version": "2.0.2",
       "license": "MIT",
       "dependencies": {
-        "@actions/core": "^1.2.6",
+        "@actions/core": "^1.11.1",
         "@actions/exec": "^1.0.0",
         "@actions/http-client": "^2.0.1",
         "@actions/io": "^1.1.1",
@ -22,20 +22,12 @@
       }
     },
     "node_modules/@actions/core": {
-      "version": "1.10.0",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-      "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dependencies": {
-        "@actions/http-client": "^2.0.1",
-        "uuid": "^8.3.2"
+        "@actions/exec": "^1.1.1",
+        "@actions/http-client": "^2.0.1"
       }
     },
-    "node_modules/@actions/core/node_modules/uuid": {
-      "version": "8.3.2",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
-      "bin": {
-        "uuid": "dist/bin/uuid"
-      }
-    },
     "node_modules/@actions/exec": {
@ -153,19 +145,12 @@
     },
     "dependencies": {
       "@actions/core": {
-        "version": "1.10.0",
-        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-        "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+        "version": "1.11.1",
+        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+        "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
         "requires": {
-          "@actions/http-client": "^2.0.1",
-          "uuid": "^8.3.2"
-        },
-        "dependencies": {
-          "uuid": {
-            "version": "8.3.2",
-            "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-            "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
-          }
+          "@actions/exec": "^1.1.1",
+          "@actions/http-client": "^2.0.1"
         }
       },
       "@actions/exec": {

View File

@ -1,6 +1,6 @@
 {
   "name": "@actions/tool-cache",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "description": "Actions tool-cache lib",
   "keywords": [
     "github",
@ -36,7 +36,7 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "dependencies": {
-    "@actions/core": "^1.2.6",
+    "@actions/core": "^1.11.1",
     "@actions/exec": "^1.0.0",
     "@actions/http-client": "^2.0.1",
     "@actions/io": "^1.1.1",