mirror of https://github.com/actions/toolkit
Merge branch 'main' into main
commit 3b0c1eb007
.gitignore
@@ -1,4 +1,5 @@
 node_modules/
 packages/*/node_modules/
 packages/*/lib/
 packages/glob/__tests__/_temp
+packages/*/src/generated/*/
.eslintrc.json
@@ -1,6 +1,13 @@
 {
-  "plugins": ["jest", "@typescript-eslint"],
-  "extends": ["plugin:github/recommended"],
+  "plugins": [
+    "jest",
+    "@typescript-eslint",
+    "prettier"
+  ],
+  "extends": [
+    "plugin:github/recommended",
+    "plugin:prettier/recommended"
+  ],
   "parser": "@typescript-eslint/parser",
   "parserOptions": {
     "ecmaVersion": 9,
@@ -8,14 +15,33 @@
     "project": "./tsconfig.eslint.json"
   },
   "rules": {
+    "prettier/prettier": [
+      "error",
+      {
+        "endOfLine": "auto"
+      }
+    ],
     "eslint-comments/no-use": "off",
     "no-constant-condition": ["error", { "checkLoops": false }],
     "github/no-then": "off",
     "import/no-namespace": "off",
     "no-shadow": "off",
     "no-unused-vars": "off",
     "i18n-text/no-en": "off",
     "filenames/match-regex": "off",
     "import/no-commonjs": "off",
     "import/named": "off",
     "no-sequences": "off",
     "import/no-unresolved": "off",
     "no-undef": "off",
     "no-only-tests/no-only-tests": "off",
     "@typescript-eslint/no-unused-vars": "error",
-    "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
+    "@typescript-eslint/explicit-member-accessibility": [
+      "error",
+      {
+        "accessibility": "no-public"
+      }
+    ],
     "@typescript-eslint/no-require-imports": "error",
     "@typescript-eslint/array-type": "error",
     "@typescript-eslint/await-thenable": "error",
@@ -23,8 +49,16 @@
     "camelcase": "off",
     "@typescript-eslint/camelcase": "off",
     "@typescript-eslint/consistent-type-assertions": "off",
-    "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
-    "@typescript-eslint/func-call-spacing": ["error", "never"],
+    "@typescript-eslint/explicit-function-return-type": [
+      "error",
+      {
+        "allowExpressions": true
+      }
+    ],
+    "@typescript-eslint/func-call-spacing": [
+      "error",
+      "never"
+    ],
     "@typescript-eslint/naming-convention": [
       "error",
       {
@@ -56,15 +90,18 @@
     "@typescript-eslint/prefer-string-starts-ends-with": "error",
     "@typescript-eslint/promise-function-async": "error",
     "@typescript-eslint/require-array-sort-compare": "error",
     "@typescript-eslint/restrict-plus-operands": "error",
     "semi": "off",
-    "@typescript-eslint/semi": ["error", "never"],
+    "@typescript-eslint/semi": [
+      "error",
+      "never"
+    ],
     "@typescript-eslint/type-annotation-spacing": "error",
     "@typescript-eslint/unbound-method": "error"
   },
   "ignorePatterns": "packages/glob/__tests__/_temp/**/",
   "env": {
     "node": true,
     "es6": true,
     "jest/globals": true
   }
 }
.github/CONTRIBUTING.md
@@ -28,7 +28,7 @@ Note that before a PR will be accepted, you must ensure:

 ### Useful Scripts

-- `npm run bootstrap` This runs `lerna bootstrap` which will install dependencies in this repository's packages and cross-link packages where necessary.
+- `npm run bootstrap` This runs `lerna exec -- npm install` which will install dependencies in this repository's packages and cross-link packages where necessary.
 - `npm run build` This compiles TypeScript code in each package (this is especially important if one package relies on changes in another when you're running tests). This is just an alias for `lerna run tsc`.
 - `npm run format` This checks that formatting has been applied with Prettier.
 - `npm test` This runs all Jest tests in all packages in this repository.
.github/workflows/artifact-tests.yml
@@ -10,8 +10,8 @@ on:
       - '**.md'

 jobs:
-  build:
-    name: Build
+  upload:
+    name: Upload

     strategy:
       matrix:
@@ -22,17 +22,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

-      - name: Set Node.js 12.x
-        uses: actions/setup-node@v1
+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v4
         with:
-          node-version: 12.x
-
-      # In order to upload & download artifacts from a shell script, certain env variables need to be set that are only available in the
-      # node context. This runs a local action that gets and sets the necessary env variables that are needed
-      - name: Set env variables
-        uses: ./packages/artifact/__tests__/ci-test-action/
+          node-version: 20.x

       # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
       # without these to just compile the artifacts package
@@ -44,45 +39,156 @@ jobs:
           npm ci
           npm run tsc
         working-directory: packages/artifact

-      - name: Set artifact file contents
-        shell: bash
-        run: |
-          echo "non-gzip-artifact-content=hello" >> $GITHUB_ENV
-          echo "gzip-artifact-content=Some large amount of text that has a compression ratio that is greater than 100%. If greater than 100%, gzip is used to upload the file" >> $GITHUB_ENV
-
       - name: Create files that will be uploaded
         run: |
-          mkdir artifact-path
-          echo ${{ env.non-gzip-artifact-content }} > artifact-path/world.txt
-          echo ${{ env.gzip-artifact-content }} > artifact-path/gzip.txt
-
-      # We're using node -e to call the functions directly available in the @actions/artifact package
-      - name: Upload artifacts using uploadArtifact()
-        run: |
-          node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-1',['artifact-path/world.txt'], '${{ github.workspace }}'))"
-          node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-2',['artifact-path/gzip.txt'], '${{ github.workspace }}'))"
-
-      - name: Download artifacts using downloadArtifact()
-        run: |
-          mkdir artifact-1-directory
-          node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-1','artifact-1-directory'))"
-          mkdir artifact-2-directory
-          node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-2','artifact-2-directory'))"
-
-      - name: Verify downloadArtifact()
-        shell: bash
-        run: |
-          packages/artifact/__tests__/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-          packages/artifact/__tests__/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
-
-      - name: Download artifacts using downloadAllArtifacts()
-        run: |
-          mkdir multi-artifact-directory
-          node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadAllArtifacts('multi-artifact-directory'))"
-
-      - name: Verify downloadAllArtifacts()
-        shell: bash
-        run: |
-          packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
-          packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+          mkdir artifact-path
+          echo -n 'hello from file 1' > artifact-path/first.txt
+          echo -n 'hello from file 2' > artifact-path/second.txt
+
+      - name: Upload Artifacts
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const {default: artifact} = require('./packages/artifact/lib/artifact')
+
+            const artifactName = 'my-artifact-${{ matrix.runs-on }}'
+            console.log('artifactName: ' + artifactName)
+
+            const fileContents = ['artifact-path/first.txt','artifact-path/second.txt']
+
+            const uploadResult = await artifact.uploadArtifact(artifactName, fileContents, './')
+            console.log(uploadResult)
+
+            const size = uploadResult.size
+            const id = uploadResult.id
+
+            console.log(`Successfully uploaded artifact ${id}`)
+
+            try {
+              await artifact.uploadArtifact(artifactName, fileContents, './')
+              throw new Error('should have failed second upload')
+            } catch (err) {
+              console.log('Successfully blocked second artifact upload')
+            }
+  verify:
+    name: Verify and Delete
+    runs-on: ubuntu-latest
+    needs: [upload]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20.x
+
+      # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
+      # without these to just compile the artifacts package
+      - name: Install root npm packages
+        run: npm ci
+
+      - name: Compile artifact package
+        run: |
+          npm ci
+          npm run tsc
+        working-directory: packages/artifact
+
+      - name: List and Download Artifacts
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const {default: artifactClient} = require('./packages/artifact/lib/artifact')
+
+            const {readFile} = require('fs/promises')
+            const path = require('path')
+
+            const findBy = {
+              repositoryOwner: process.env.GITHUB_REPOSITORY.split('/')[0],
+              repositoryName: process.env.GITHUB_REPOSITORY.split('/')[1],
+              token: '${{ secrets.GITHUB_TOKEN }}',
+              workflowRunId: process.env.GITHUB_RUN_ID
+            }
+
+            const listResult = await artifactClient.listArtifacts({latest: true, findBy})
+            console.log(listResult)
+
+            const artifacts = listResult.artifacts
+            const expected = [
+              'my-artifact-ubuntu-latest',
+              'my-artifact-windows-latest',
+              'my-artifact-macos-latest'
+            ]
+
+            const foundArtifacts = artifacts.filter(artifact =>
+              expected.includes(artifact.name)
+            )
+
+            if (foundArtifacts.length !== 3) {
+              console.log('Unexpected length of found artifacts', foundArtifacts)
+              throw new Error(
+                `Expected 3 artifacts but found ${foundArtifacts.length} artifacts.`
+              )
+            }
+
+            console.log('Successfully listed artifacts that were uploaded')
+
+            const files = [
+              {name: 'artifact-path/first.txt', content: 'hello from file 1'},
+              {name: 'artifact-path/second.txt', content: 'hello from file 2'}
+            ]
+
+            for (const artifact of foundArtifacts) {
+              const {downloadPath} = await artifactClient.downloadArtifact(artifact.id, {
+                path: artifact.name,
+                findBy
+              })
+
+              console.log('Downloaded artifact to:', downloadPath)
+
+              for (const file of files) {
+                const filepath = path.join(
+                  process.env.GITHUB_WORKSPACE,
+                  downloadPath,
+                  file.name
+                )
+
+                console.log('Checking file:', filepath)
+
+                const content = await readFile(filepath, 'utf8')
+                if (content.trim() !== file.content.trim()) {
+                  throw new Error(
+                    `Expected file '${file.name}' to contain '${file.content}' but found '${content}'`
+                  )
+                }
+              }
+            }
+      - name: Delete Artifacts
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const {default: artifactClient} = require('./packages/artifact/lib/artifact')
+
+            const artifactsToDelete = [
+              'my-artifact-ubuntu-latest',
+              'my-artifact-windows-latest',
+              'my-artifact-macos-latest'
+            ]
+
+            for (const artifactName of artifactsToDelete) {
+              const {id} = await artifactClient.deleteArtifact(artifactName)
+            }
+
+            const {artifacts} = await artifactClient.listArtifacts({latest: true})
+            const foundArtifacts = artifacts.filter(artifact =>
+              artifactsToDelete.includes(artifact.name)
+            )
+
+            if (foundArtifacts.length !== 0) {
+              console.log('Unexpected length of found artifacts:', foundArtifacts)
+              throw new Error(
+                `Expected 0 artifacts but found ${foundArtifacts.length} artifacts.`
+              )
+            }
.github/workflows/audit.yml
@@ -18,12 +18,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

-      - name: Set Node.js 12.x
-        uses: actions/setup-node@v1
+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v4
         with:
-          node-version: 12.x
+          node-version: 20.x

       - name: npm install
         run: npm install
@@ -31,9 +31,8 @@ jobs:
       - name: Bootstrap
         run: npm run bootstrap

-      - name: audit tools
-        # `|| npm audit` to pretty-print the output if vulnerabilies are found after filtering.
-        run: npm audit --audit-level=moderate --json | scripts/audit-allow-list || npm audit --audit-level=moderate
+      - name: audit tools (without allow-list)
+        run: npm audit --audit-level=moderate

       - name: audit packages
         run: npm run audit-all
.github/workflows/cache-tests.yml
@@ -22,12 +22,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

-      - name: Set Node.js 12.x
-        uses: actions/setup-node@v1
+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v4
         with:
-          node-version: 12.x
+          node-version: 20.x

       # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
       # node context. This runs a local action that gets and sets the necessary env variables that are needed
.github/workflows/cache-windows-bsd-unit-tests.yml (new file)
@@ -0,0 +1,90 @@
+name: cache-windows-bsd-unit-tests
+on:
+  push:
+    branches:
+      - main
+    paths-ignore:
+      - '**.md'
+  pull_request:
+    paths-ignore:
+      - '**.md'
+
+jobs:
+  build:
+    name: Build
+
+    runs-on: windows-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - shell: bash
+        run: |
+          rm "C:\Program Files\Git\usr\bin\tar.exe"
+
+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v1
+        with:
+          node-version: 20.x
+
+      # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
+      # node context. This runs a local action that gets and sets the necessary env variables that are needed
+      - name: Set env variables
+        uses: ./packages/cache/__tests__/__fixtures__/
+
+      # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
+      # without these to just compile the cache package
+      - name: Install root npm packages
+        run: npm ci
+
+      - name: Compile cache package
+        run: |
+          npm ci
+          npm run tsc
+        working-directory: packages/cache
+
+      - name: Generate files in working directory
+        shell: bash
+        run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
+
+      - name: Generate files outside working directory
+        shell: bash
+        run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
+
+      # We're using node -e to call the functions directly available in the @actions/cache package
+      - name: Save cache using saveCache()
+        run: |
+          node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
+
+      - name: Delete cache folders before restoring
+        shell: bash
+        run: |
+          rm -rf test-cache
+          rm -rf ~/test-cache
+
+      - name: Restore cache using restoreCache() with http-client
+        run: |
+          node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))"
+
+      - name: Verify cache restored with http-client
+        shell: bash
+        run: |
+          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
+          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
+
+      - name: Delete cache folders before restoring
+        shell: bash
+        run: |
+          rm -rf test-cache
+          rm -rf ~/test-cache
+
+      - name: Restore cache using restoreCache() with Azure SDK
+        run: |
+          node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
+
+      - name: Verify cache restored with Azure SDK
+        shell: bash
+        run: |
+          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
+          packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
.github/workflows/codeql.yml
@@ -20,18 +20,18 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v1
+        uses: github/codeql-action/init@v2
         with:
           languages: javascript

       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v1
+        uses: github/codeql-action/autobuild@v2

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v1
+        uses: github/codeql-action/analyze@v2
.github/workflows/releases.yml
@@ -5,71 +5,79 @@ on:
     inputs:
       package:
         required: true
-        description: 'core, artifact, cache, exec, github, glob, io, tool-cache'
+        description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache, attest'

 jobs:
   test:
     runs-on: macos-latest

     steps:
       - name: setup repo
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

       - name: verify package exists
         run: ls packages/${{ github.event.inputs.package }}

+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20.x
+
       - name: npm install
         run: npm install

       - name: bootstrap
         run: npm run bootstrap

       - name: build
         run: npm run build

       - name: test
         run: npm run test

       - name: pack
         run: npm pack
         working-directory: packages/${{ github.event.inputs.package }}

       - name: upload artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}
           path: packages/${{ github.event.inputs.package }}/*.tgz

   publish:
     runs-on: macos-latest
     needs: test
     environment: npm-publish
+    permissions:
+      contents: read
+      id-token: write
     steps:

       - name: download artifact
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}

       - name: setup authentication
         run: echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" >> .npmrc
         env:
           NPM_TOKEN: ${{ secrets.TOKEN }}

       - name: publish
-        run: npm publish *.tgz
+        run: npm publish --provenance *.tgz

       - name: notify slack on failure
         if: failure()
         run: |
           curl -X POST -H 'Content-type: application/json' --data '{"text":":pb__failed: Failed to publish a new version of ${{ github.event.inputs.package }}"}' $SLACK_WEBHOOK
         env:
           SLACK_WEBHOOK: ${{ secrets.SLACK }}

       - name: notify slack on success
         if: success()
         run: |
           curl -X POST -H 'Content-type: application/json' --data '{"text":":dance: Successfully published a new version of ${{ github.event.inputs.package }}"}' $SLACK_WEBHOOK
         env:
           SLACK_WEBHOOK: ${{ secrets.SLACK }}
.github/workflows/unit-tests.yml
@@ -23,12 +23,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

-      - name: Set Node.js 12.x
-        uses: actions/setup-node@v1
+      - name: Set Node.js 20.x
+        uses: actions/setup-node@v4
         with:
-          node-version: 12.x
+          node-version: 20.x

       - name: npm install
         run: npm install
@@ -40,7 +40,7 @@ jobs:
       run: npm run build

     - name: npm test
-      run: npm test
+      run: npm test -- --runInBand --forceExit
       env:
         GITHUB_TOKEN: ${{ github.token }}
@@ -9,7 +9,7 @@ jobs:
     if: ${{ github.repository_owner == 'actions' }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - name: Update Octokit
         working-directory: packages/github
         run: |
@@ -30,7 +30,7 @@ jobs:
           fi
       - name: Create PR
         if: ${{steps.status.outputs.createPR}}
-        uses: actions/github-script@v2
+        uses: actions/github-script@v6
         with:
           github-token: ${{secrets.GITHUB_TOKEN}}
           script: |
.eslintignore
@@ -1,3 +1,5 @@
 node_modules/
 packages/*/node_modules/
 packages/*/lib/
+packages/glob/__tests__/_temp/**/
+packages/*/src/generated/*/
.prettierrc.json
@@ -7,5 +7,6 @@
   "trailingComma": "none",
   "bracketSpacing": false,
   "arrowParens": "avoid",
-  "parser": "typescript"
+  "parser": "typescript",
+  "endOfLine": "auto"
 }
CODEOWNERS
@@ -1,4 +1,5 @@
 * @actions/actions-runtime

-/packages/artifact/ @actions/actions-service
-/packages/cache/ @actions/actions-service
+/packages/artifact/ @actions/artifacts-actions
+/packages/cache/ @actions/actions-cache
+/packages/attest/ @actions/package-security
README.md
@@ -46,6 +46,15 @@ $ npm install @actions/glob
 ```
 <br/>

+:phone: [@actions/http-client](packages/http-client)
+
+A lightweight HTTP client optimized for building actions. Read more [here](packages/http-client)
+
+```bash
+$ npm install @actions/http-client
+```
+<br/>
+
 :pencil2: [@actions/io](packages/io)

 Provides disk i/o functions like cp, mv, rmRF, which etc. Read more [here](packages/io)
@@ -93,6 +102,15 @@ $ npm install @actions/cache
 ```
 <br/>

+:lock_with_ink_pen: [@actions/attest](packages/attest)
+
+Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)
+
+```bash
+$ npm install @actions/attest
+```
+<br/>
+
 ## Creating an Action with the Toolkit

 :question: [Choosing an action type](docs/action-types.md)
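Editor's note: the `@actions/attest` entry added above is new in this commit. As a rough, hedged illustration of what the package is for (assuming the `attestProvenance` helper and a workflow token with `id-token: write` permission; exact option and result field names may vary by version):

```js
const {attestProvenance} = require('@actions/attest')

async function main() {
  // Write a build provenance attestation for one artifact.
  // subjectDigest maps a digest algorithm to the artifact's hex digest
  // (the digest value below is a hypothetical placeholder).
  const attestation = await attestProvenance({
    subjectName: 'my-artifact.tgz',
    subjectDigest: {sha256: '<hex digest of the artifact>'},
    token: process.env.GITHUB_TOKEN
  })
  console.log(attestation)
}

main()
```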
@@ -32,7 +32,7 @@ jobs:
     os: [ubuntu-16.04, windows-2019]
   runs-on: ${{matrix.os}}
   actions:
-  - uses: actions/setup-node@v1
+  - uses: actions/setup-node@v4
     with:
       version: ${{matrix.node}}
   - run: |
docs/commands.md
@@ -50,7 +50,18 @@ function setSecret(secret: string): void {}

 Now, future logs containing BAR will be masked. E.g. running `echo "Hello FOO BAR World"` will now print `Hello FOO **** World`.

-**WARNING** The add-mask and setSecret commands only support single line secrets. To register a multiline secrets you must register each line individually otherwise it will not be masked.
+**WARNING** The add-mask and setSecret commands only support single-line
+secrets or multi-line secrets that have been escaped. `@actions/core`
+`setSecret` will escape the string you provide by default. When an escaped
+multi-line string is provided the whole string and each of its lines
+individually will be masked. For example you can mask `first\nsecond\r\nthird`
+using:
+
+```sh
+echo "::add-mask::first%0Asecond%0D%0Athird"
+```
+
+This will mask `first%0Asecond%0D%0Athird`, `first`, `second` and `third`.

 **WARNING** Do **not** mask short values if you can avoid it, it could render your output unreadable (and future steps' output as well).
 For example, if you mask the letter `l`, running `echo "Hello FOO BAR World"` will now print `He*********o FOO BAR Wor****d`
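Editor's note: the same multi-line masking can be done from JavaScript with `@actions/core`, which (as the updated text above says) escapes the value for you. A minimal sketch:

```js
const core = require('@actions/core')

// setSecret escapes the multi-line value; the whole string and each
// individual line ('first', 'second', 'third') are masked in the logs.
core.setSecret('first\nsecond\r\nthird')
```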
@@ -100,9 +111,12 @@ There are several commands to emit different levels of log output:

 | log level | example usage |
 |---|---|
 | [debug](action-debugging.md) | `echo "::debug::My debug message"` |
+| notice | `echo "::notice::My notice message"` |
 | warning | `echo "::warning::My warning message"` |
 | error | `echo "::error::My error message"` |

 Additional syntax options are described at [the workflow command documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-debug-message).

 ### Command Echoing

 By default, the echoing of commands to stdout only occurs if [Step Debugging is enabled](./action-debugging.md#How-to-Access-Step-Debug-Logs)
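Editor's note: the log-level commands tabled above also have `@actions/core` function equivalents (a minimal sketch; `core.notice` exists in newer versions of the package):

```js
const core = require('@actions/core')

core.debug('My debug message')   // only shown when step debugging is enabled
core.notice('My notice message')
core.warning('My warning message')
core.error('My error message')
```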
docs/action-versioning.md
@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:

 ```yaml
 steps:
-    using: actions/setup-node@v1
+    using: actions/setup-node@v4
 ```

 # Define Metadata
docs/proxy-support.md
@@ -4,7 +4,7 @@ Self-hosted runners [can be configured](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners)

 For actions to **just work** behind a proxy server:

-1. Use [tool-cache] version >= 1.3.1
-2. Optionally use [actions/http-client](https://github.com/actions/http-client)
+1. Use [tool-cache](/packages/tool-cache) version >= 1.3.1
+2. Optionally use [actions/http-client](/packages/http-client)

 If you are using other http clients, refer to the [environment variables set by the runner](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners).
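Editor's note: as an illustration of the proxy behavior described above, `@actions/http-client` reads the runner's proxy environment variables for you. A minimal sketch:

```js
const {HttpClient} = require('@actions/http-client')

async function main() {
  // https_proxy / no_proxy set by the runner are honored automatically;
  // no extra configuration is needed in the action code.
  const client = new HttpClient('proxy-example')
  const response = await client.get('https://api.github.com')
  console.log(response.message.statusCode)
}

main()
```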
jest.config.js
@@ -8,4 +8,4 @@ module.exports = {
     '^.+\\.ts$': 'ts-jest'
   },
+  verbose: true
 }
lerna.json
@@ -1,6 +1,6 @@
 {
   "packages": [
-    "packages/*"
+    "packages/**/*"
   ],
   "version": "independent"
 }
nx.json (new file)
@@ -0,0 +1,24 @@
+{
+  "tasksRunnerOptions": {
+    "default": {
+      "runner": "nx/tasks-runners/default",
+      "options": {
+        "cacheableOperations": []
+      }
+    }
+  },
+  "affected": {
+    "defaultBase": "master"
+  },
+  "$schema": "./node_modules/nx/schemas/nx-schema.json",
+  "namedInputs": {
+    "default": [
+      "{projectRoot}/**/*",
+      "sharedGlobals"
+    ],
+    "sharedGlobals": [],
+    "production": [
+      "default"
+    ]
+  }
+}
package.json
@@ -3,30 +3,34 @@
   "private": true,
   "scripts": {
     "audit-all": "lerna run audit-moderate",
-    "bootstrap": "lerna bootstrap",
+    "bootstrap": "lerna exec -- npm install",
     "build": "lerna run tsc",
     "clean": "lerna clean",
+    "repair": "lerna repair",
     "check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build -- -- --noEmit\"",
     "format": "prettier --write packages/**/*.ts",
     "format-check": "prettier --check packages/**/*.ts",
     "lint": "eslint packages/**/*.ts",
     "lint-fix": "eslint packages/**/*.ts --fix",
     "new-package": "scripts/create-package",
-    "test": "jest --testTimeout 10000"
+    "test": "jest --testTimeout 60000"
   },
   "devDependencies": {
-    "@types/jest": "^27.0.2",
-    "@types/node": "^12.20.13",
+    "@types/jest": "^29.5.4",
+    "@types/node": "^20.5.7",
     "@types/signale": "^1.4.1",
     "@typescript-eslint/parser": "^4.0.0",
     "concurrently": "^6.1.0",
-    "eslint": "^7.23.0",
-    "eslint-plugin-github": "^4.1.3",
-    "eslint-plugin-jest": "^22.21.0",
+    "eslint": "^8.0.1",
+    "eslint-config-prettier": "^8.9.0",
+    "eslint-plugin-github": "^4.9.2",
+    "eslint-plugin-jest": "^27.2.3",
+    "eslint-plugin-prettier": "^5.0.0",
     "flow-bin": "^0.115.0",
-    "jest": "^27.2.5",
-    "lerna": "^4.0.0",
-    "prettier": "^1.19.1",
-    "ts-jest": "^27.0.5",
-    "typescript": "^3.9.9"
+    "jest": "^29.6.4",
+    "lerna": "^7.1.4",
+    "nx": "16.6.0",
+    "prettier": "^3.0.0",
+    "ts-jest": "^29.1.1",
+    "typescript": "^5.2.2"
   }
 }
packages/artifact/CONTRIBUTIONS.md
@@ -1,30 +1,43 @@
 # Contributions

-This package is used internally by the v2+ versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions so it is important that major changes or updates get properly tested.
+This package is used internally by the v4 versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions so it is important that major changes or updates get properly tested.

 Any issues or feature requests that are related to the artifact actions should be filed in the appropriate repo.

+A limited range of unit tests run as part of each PR when making changes to the artifact packages. For small contributions and fixes, they should be sufficient.
+
-If making large changes, there are a few scenarios that should be tested.
+If making large changes, there are a few scenarios that should be tested:

-- Uploading very large artifacts (large artifacts get compressed using gzip so compression/decompression must be tested)
-- Uploading artifacts with lots of small files (each file is uploaded with its own HTTP call, timeouts and non-success HTTP responses can be expected so they must be properly handled)
+- Uploading very large artifacts
+- Uploading artifacts with lots of small files
 - Uploading artifacts using a self-hosted runner (uploads and downloads behave differently due to extra latency)
 - Downloading a single artifact (large and small, if lots of small files are part of an artifact, timeouts and non-success HTTP responses can be expected)
 - Downloading all artifacts at once

 Large architectural changes can impact upload/download performance so it is important to separately run extra tests. We request that any large contributions/changes have extra detailed testing so we can verify performance and possible regressions.

-It is not possible to run end-to-end tests for artifacts as part of a PR in this repo because certain env variables such as `ACTIONS_RUNTIME_URL` are only available from the context of an action as opposed to a shell script. These env variables are needed in order to make the necessary API calls.
+Tests will run for every push/pull_request [via Actions](https://github.com/actions/toolkit/blob/main/.github/workflows/artifact-tests.yml).

 # Testing

-Any easy way to test changes is to fork the artifact actions and to use `npm link` to test your changes.
-
-1. Fork the [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact) repos
-2. Clone the forks locally
-3. With your local changes to the toolkit repo, type `npm link` after ensuring there are no errors when running `tsc`
-4. In the locally cloned fork, type `npm link @actions/artifact`
-4. Create a new release for your local fork using `tsc` and `npm run release` (this will create a new `dist/index.js` file using `@vercel/ncc`)
-5. Commit and push your local changes, you will then be able to test your changes with your forked action
+## Package tests
+
+To run unit tests for the `@actions/artifact` package:
+
+1. Clone `actions/toolkit` locally
+2. Install dependencies: `npm bootstrap`
+3. Change working directory to `packages/artifact`
+4. Run jest tests: `npm run test`
+
+## Within upload-artifact or download-artifact actions
+
+An easy way to test changes for the official upload/download actions is to fork them, compile changes and run them.
+
+1. For your local `actions/toolkit` changes:
+   1. Change directory to `packages/artifact`
+   2. Compile the changes: `npm run tsc`
+   3. Symlink your package change: `npm link`
+2. Fork and clone either [upload-artifact](https://github.com/actions/upload-artifact) or [download-artifact](https://github.com/actions/download-artifact)
+   1. In the locally cloned fork, link to your local toolkit changes: `npm link @actions/artifact`
+   2. Then, compile your changes with: `npm run release`. The local `dist/index.js` should be updated with your changes.
+   3. Commit and push to your fork, you can then test with a `uses:` in your workflow pointed at your fork.
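Editor's note: for the "lots of small files" scenario in the testing checklist above, a stress harness could look roughly like this (a hedged sketch against the v2 client API shown in the package README; the directory name and file count are arbitrary):

```js
const fs = require('fs')
const path = require('path')
const {DefaultArtifactClient} = require('@actions/artifact')

async function stressUpload() {
  const dir = 'stress-files'
  fs.mkdirSync(dir, {recursive: true})

  // Many small files exercise per-file handling, timeouts and retries.
  const files = []
  for (let i = 0; i < 1000; i++) {
    const file = path.join(dir, `file-${i}.txt`)
    fs.writeFileSync(file, `content ${i}`)
    files.push(file)
  }

  const artifact = new DefaultArtifactClient()
  const {id, size} = await artifact.uploadArtifact('stress-test', files, '.')
  console.log(`uploaded artifact ${id} (${size} bytes)`)
}

stressUpload()
```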
packages/artifact/README.md
@@ -1,213 +1,192 @@
 # `@actions/artifact`

-## Usage
+Interact programmatically with [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts).

-You can use this package to interact with the actions artifacts.
-- [Upload an Artifact](#Upload-an-Artifact)
-- [Download a Single Artifact](#Download-a-Single-Artifact)
-- [Download All Artifacts](#Download-all-Artifacts)
-- [Additional Documentation](#Additional-Documentation)
-- [Contributions](#Contributions)
+This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact) and [`@actions/download-artifact`](https://github.com/actions/download-artifact) actions.

-Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory.
-
-## Upload an Artifact
+- [`@actions/artifact`](#actionsartifact)
+  - [v2 - What's New](#v2---whats-new)
+    - [Improvements](#improvements)
+    - [Breaking changes](#breaking-changes)
+  - [Quick Start](#quick-start)
+  - [Examples](#examples)
+    - [Upload and Download](#upload-and-download)
+    - [Delete an Artifact](#delete-an-artifact)
+    - [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
+    - [Speeding up large uploads](#speeding-up-large-uploads)
+  - [Additional Resources](#additional-resources)

-Method Name: `uploadArtifact`
+## v2 - What's New

-#### Inputs
-- `name`
-  - The name of the artifact that is being uploaded
-  - Required
-- `files`
-  - A list of file paths that describe what should be uploaded as part of the artifact
-  - If a path is provided that does not exist, an error will be thrown
-  - Can be absolute or relative. Internally everything is normalized and resolved
-  - Required
-- `rootDirectory`
-  - A file path that denotes the root directory of the files being uploaded. This path is used to strip the paths provided in `files` to control how they are uploaded and structured
-  - If a file specified in `files` is not in the `rootDirectory`, an error will be thrown
-  - Required
-- `options`
-  - Extra options that allow for the customization of the upload behavior
-  - Optional
+> [!IMPORTANT]
+> @actions/artifact v2+, upload-artifact@v4+, and download-artifact@v4+ are not currently supported on GHES yet. The previous version of this package can be found at [this tag](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.2/packages/artifact) and [on npm](https://www.npmjs.com/package/@actions/artifact/v/1.1.2).

-#### Available Options
+The release of `@actions/artifact@v2` (including `upload-artifact@v4` and `download-artifact@v4`) are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.

-- `continueOnError`
-  - Indicates if the artifact upload should continue in the event a file fails to upload. If there is a error during upload, a partial artifact will always be created and available for download at the end. The `size` reported will be the amount of storage that the user or org will be charged for the partial artifact.
-  - If set to `false`, and an error is encountered, all other uploads will stop and any files that were queued will not be attempted to be uploaded. The partial artifact available will only include files up until the failure.
-  - If set to `true` and an error is encountered, the failed file will be skipped and ignored and all other queued files will be attempted to be uploaded. There will be an artifact available for download at the end with everything excluding the file that failed to upload
-  - Optional, defaults to `true` if not specified
-- `retentionDays`
-  - Duration after which artifact will expire in days
-  - Minimum value: 1
-  - Maximum value: 90 unless changed by repository setting
-  - If this is set to a greater value than the retention settings allowed, the retention on artifacts will be reduced to match the max value allowed on the server, and the upload process will continue. An input of 0 assumes default retention value.
+### Improvements

-#### Example using Absolute File Paths
+1. All upload and download operations are much quicker, up to 80% faster download times and 96% faster upload times in worst case scenarios.
+2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
+3. Artifacts can now be downloaded and deleted from the UI _before_ the entire workflow run finishes.
+4. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs. Both of these factors help reduce the possibility of accidentally corrupting Artifact files. (Digest/integrity hash coming soon in the API!)
+5. This library (and `actions/download-artifact`) now support downloading Artifacts from _other_ repositories and runs if a `GITHUB_TOKEN` with sufficient `actions:read` permissions are provided.
+
+### Breaking changes
+
+1. Firewall rules required for self-hosted runners.
+
+   If you are using self-hosted runners behind a firewall, you must have flows open to [Actions endpoints](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github). If you cannot use wildcard rules for your firewall, see the GitHub [meta endpoint](https://api.github.com/meta) for specific endpoints.
+
+   e.g.
+
+   ```bash
+   curl https://api.github.com/meta | jq .domains.actions
+   ```
+
+2. Uploading to the same named Artifact multiple times.
+
+   Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once.
+
+3. Limit of Artifacts for an individual job.
+
+   Each job in a workflow run now has a limit of 10 artifacts.
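Editor's note: for breaking change 2 above, a common workaround is to derive a unique artifact name per job and merge downstream if needed. A minimal sketch using the client API from this README (the `SHARD` environment variable is a hypothetical per-job identifier):

```js
const {DefaultArtifactClient} = require('@actions/artifact')

async function uploadShard() {
  const shard = process.env.SHARD || '0'
  const artifact = new DefaultArtifactClient()

  // Each job writes to its own artifact name, so no two jobs attempt
  // to upload to the same (now immutable) artifact.
  await artifact.uploadArtifact(
    `test-results-${shard}`,
    [`results-${shard}.xml`],
    '.'
  )
}

uploadShard()
```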

+## Quick Start
+
+Install the package:
+
+```bash
+npm i @actions/artifact
+```
+
+Import the module:
+
 ```js
-const artifact = require('@actions/artifact');
-const artifactClient = artifact.create()
-const artifactName = 'my-artifact';
-const files = [
-    '/home/user/files/plz-upload/file1.txt',
-    '/home/user/files/plz-upload/file2.txt',
-    '/home/user/files/plz-upload/dir/file3.txt'
-]
-const rootDirectory = '/home/user/files/plz-upload'
-const options = {
-    continueOnError: true
+// ES6 module
+import {DefaultArtifactClient} from '@actions/artifact'
+
+// CommonJS
+const {DefaultArtifactClient} = require('@actions/artifact')
 ```
+
+Then instantiate:
+
+```js
+const artifact = new DefaultArtifactClient()
+```
+
+ℹ️ For a comprehensive list of classes, interfaces, functions and more, see the [generated documentation](./docs/generated/README.md).
+
+## Examples
+
+### Upload and Download
+
+The most basic scenario is uploading one or more files to an Artifact, then downloading that Artifact. Downloads are based on the Artifact ID, which can be obtained in the response of `uploadArtifact`, `getArtifact`, `listArtifacts` or via the [REST API](https://docs.github.com/en/rest/actions/artifacts).
+
+```js
+const {id, size} = await artifact.uploadArtifact(
+  // name of the artifact
+  'my-artifact',
+  // files to include (supports absolute and relative paths)
+  ['/absolute/path/file1.txt', './relative/file2.txt'],
+  {
+    // optional: how long to retain the artifact
+    // if unspecified, defaults to repository/org retention settings (the limit of this value)
+    retentionDays: 10
+  }
+)
+
+console.log(`Created artifact with id: ${id} (bytes: ${size}`)
+
+const {downloadPath} = await artifact.downloadArtifact(id, {
+  // optional: download destination path. otherwise defaults to $GITHUB_WORKSPACE
+  path: '/tmp/dst/path',
+})
+
+console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
+```
+
+### Delete an Artifact
+
+To delete an artifact, all you need is the name.
+
+```js
+const {id} = await artifact.deleteArtifact(
+  // name of the artifact
+  'my-artifact'
+)
+
+console.log(`Deleted Artifact ID '${id}'`)
+```
+
+It also supports options to delete from other repos/runs given a github token with `actions:write` permissions on the target repository is supplied.
+
+```js
+const findBy = {
+  // must have actions:write permission on target repository
+  token: process.env['GITHUB_TOKEN'],
+  workflowRunId: 123,
+  repositoryOwner: 'actions',
+  repositoryName: 'toolkit'
+}

-const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
+const {id} = await artifact.deleteArtifact(
+  // name of the artifact
+  'my-artifact',
+  // options to find by other repo/owner
+  { findBy }
+)
+
+console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/ ${findBy.repositoryName}`)
+```

-#### Example using Relative File Paths
-```js
-// Assuming the current working directory is /home/user/files/plz-upload
-const artifact = require('@actions/artifact');
-const artifactClient = artifact.create()
-const artifactName = 'my-artifact';
-const files = [
-    'file1.txt',
-    'file2.txt',
-    'dir/file3.txt'
-]
+### Downloading from other workflow runs or repos

-const rootDirectory = '.' // Also possible to use __dirname
-const options = {
-    continueOnError: false
-}
+It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must specify `options.findBy` to `downloadArtifact`.
+
+```ts
+const findBy = {
+  // must have actions:read permission on target repository
+  token: process.env['GITHUB_TOKEN'],
+  workflowRunId: 123,
+  repositoryOwner: 'actions',
+  repositoryName: 'toolkit'
+}

-const uploadResponse = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
+await artifact.downloadArtifact(1337, {
+  findBy
+})
+
+// can also be used in other methods
+
+await artifact.getArtifact('my-artifact', {
+  findBy
+})
+
+await artifact.listArtifacts({
+  findBy
+})
+```

-#### Upload Result
+### Speeding up large uploads

-The returned `UploadResponse` will contain the following information
+If you have large files that need to be uploaded (or file types that don't compress well), you may benefit from changing the compression level of the Artifact archive. NOTE: This is a tradeoff between artifact upload time and stored data size.

-- `artifactName`
-  - The name of the artifact that was uploaded
-- `artifactItems`
-  - A list of all files that describe what is uploaded if there are no errors encountered. Usually this will be equal to the inputted `files` with the exception of empty directories (will not be uploaded)
-- `size`
-  - Total size of the artifact that was uploaded in bytes
-- `failedItems`
-  - A list of items that were not uploaded successfully (this will include queued items that were not uploaded if `continueOnError` is set to false). This is a subset of `artifactItems`
-
-## Download a Single Artifact
-
-Method Name: `downloadArtifact`
-
-#### Inputs
-- `name`
-  - The name of the artifact to download
-  - Required
-- `path`
-  - Path that denotes where the artifact will be downloaded to
-  - Optional. Defaults to the GitHub workspace directory(`$GITHUB_WORKSPACE`) if not specified
-- `options`
-  - Extra options that allow for the customization of the download behavior
-  - Optional
-
-#### Available Options
-
-- `createArtifactFolder`
-  - Specifies if a folder (the artifact name) is created for the artifact that is downloaded (contents downloaded into this folder),
-  - Optional. Defaults to false if not specified
-
-#### Example
-
-```js
-const artifact = require('@actions/artifact');
-const artifactClient = artifact.create()
-const artifactName = 'my-artifact';
-const path = 'some/directory'
-const options = {
-  createArtifactFolder: false
-}
-
-const downloadResponse = await artifactClient.downloadArtifact(artifactName, path, options)
-
-// Post download, the directory structure will look like this
-/some
-  /directory
-    /file1.txt
-    /file2.txt
-    /dir
-      /file3.txt
-
-// If createArtifactFolder is set to true, the directory structure will look like this
-/some
-  /directory
-    /my-artifact
-      /file1.txt
-      /file2.txt
-      /dir
-        /file3.txt
+```ts
+await artifact.uploadArtifact('my-massive-artifact', ['big_file.bin'], {
+  // The level of compression for Zlib to be applied to the artifact archive.
+  // - 0: No compression
+  // - 1: Best speed
+  // - 6: Default compression (same as GNU Gzip)
+  // - 9: Best compression
+  compressionLevel: 0
+})
 ```

-#### Download Response
+## Additional Resources

-The returned `DownloadResponse` will contain the following information
-
-- `artifactName`
-  - The name of the artifact that was downloaded
-- `downloadPath`
-  - The full Path to where the artifact was downloaded
-
-## Download All Artifacts
-
-Method Name: `downloadAllArtifacts`
-
-#### Inputs
-- `path`
-  - Path that denotes where the artifact will be downloaded to
-  - Optional. Defaults to the GitHub workspace directory(`$GITHUB_WORKSPACE`) if not specified
-
-```js
-const artifact = require('@actions/artifact');
-const artifactClient = artifact.create();
-const downloadResponse = await artifactClient.downloadAllArtifacts();
-
-// output result
-for (response in downloadResponse) {
-  console.log(response.artifactName);
-  console.log(response.downloadPath);
-}
-```
-
-Because there are multiple artifacts, an extra directory (denoted by the name of the artifact) will be created for each artifact in the path. With 2 artifacts(`my-artifact-1` and `my-artifact-2` for example) and the default path, the directory structure will be as follows:
-```js
-/GITHUB_WORKSPACE
-  /my-artifact-1
-    / .. contents of `my-artifact-1`
-  /my-artifact-2
-    / .. contents of `my-artifact-2`
-```
-
-#### Download Result
-
-An array will be returned that describes the results for downloading all artifacts. The number of items in the array indicates the number of artifacts that were downloaded.
-
-Each artifact will have the same `DownloadResponse` as if it was individually downloaded
-- `artifactName`
-  - The name of the artifact that was downloaded
-- `downloadPath`
-  - The full Path to where the artifact was downloaded
-
-## Additional Documentation
-
-Check out [additional-information](docs/additional-information.md) for extra documentation around usage, restrictions and behavior.
-
-Check out [implementation-details](docs/implementation-details.md) for extra information about the implementation of this package.
-
-## Contributions
-
-See [contributor guidelines](https://github.com/actions/toolkit/blob/main/.github/CONTRIBUTING.md) for general guidelines and information about toolkit contributions.
-
-For contributions related to this package, see [artifact contributions](CONTRIBUTIONS.md) for more information.
+- [Releases](./RELEASES.md)
+- [Contribution Guide](./CONTRIBUTIONS.md)
+- [Frequently Asked Questions](./docs/faq.md)
packages/artifact/RELEASES.md
@@ -1,15 +1,111 @@
 # @actions/artifact Releases

-### 0.1.0
+### 2.1.4

-- Initial release
+- Adds info-level logging for zip extraction

-### 0.2.0
+### 2.1.3

-- Fixes to TCP connections not closing
-- GZip file compression to speed up downloads
-- Improved logging and output
-- Extra documentation
+- Fixes a bug in the extract logic updated in 2.1.2
+
+### 2.1.2
+
+- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts
+
+### 2.1.1
+
+- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
+
+### 2.1.0
+
+- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
+- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)
+
+### 2.0.1
+
+- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
+
+### 2.0.0
+
+- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
+- Numerous API changes, [some breaking](./README.md#breaking-changes).
+- [Blog post with more info](https://github.blog/2024-02-12-get-started-with-v4-of-github-actions-artifacts/)
+
+### 1.1.1
+
+- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
+
+### 1.1.0
+
+- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
+
+### 1.0.2
+
+- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
+
+### 1.0.1
+
+- Update to v2.0.0 of `@actions/http-client`
+
+### 1.0.0
+
+- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
+
+### 0.6.1
+
+- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
+
+### 0.6.0
+
+- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
+- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
+- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
+- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
+- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
+- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
+
+### 0.5.2
+
+- Add HTTP 500 as a retryable status code for artifact upload and download.
+
+### 0.5.1
+
+- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
+
+### 0.5.0
+
+- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
+
+### 0.4.2
+
+- Improved retry-ability when a partial artifact download is encountered
+
+### 0.4.1
+
+- Update to latest @actions/core version
+
+### 0.4.0
+
+- Add option to specify custom retentions on artifacts
+
+### 0.3.5
+
+- Retry in the event of a 413 response
+
+### 0.3.3
+
+- Increase chunk size during upload from 4MB to 8MB
+- Improve user-agent strings during API calls to help internally diagnose issues
+
+### 0.3.2
+
+- Fix to ensure readstreams get correctly reset in the event of a retry
+
+### 0.3.1
+
+- Fix to ensure temporary gzip files get correctly deleted during artifact upload
+- Remove spaces as a forbidden character during upload
+
 ### 0.3.0

@@ -20,44 +116,13 @@
 - Clearer error message if storage quota has been reached
 - Improved logging and output during artifact download

-### 0.3.1
+### 0.2.0

-- Fix to ensure temporary gzip files get correctly deleted during artifact upload
-- Remove spaces as a forbidden character during upload
+- Fixes to TCP connections not closing
+- GZip file compression to speed up downloads
+- Improved logging and output
+- Extra documentation

-### 0.3.2
+### 0.1.0

-- Fix to ensure readstreams get correctly reset in the event of a retry
-
-### 0.3.3
-
-- Increase chunk size during upload from 4MB to 8MB
-- Improve user-agent strings during API calls to help internally diagnose issues
-
-### 0.3.5
-
-- Retry in the event of a 413 response
-
-### 0.4.0
-
-- Add option to specify custom retentions on artifacts
-
-### 0.4.1
-
-- Update to latest @actions/core version
-
-### 0.4.2
-
-- Improved retry-ability when a partial artifact download is encountered
-
-### 0.5.0
-
-- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
-
-### 0.5.1
-
-- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
-
-### 0.5.2
-
-- Add HTTP 500 as a retryable status code for artifact upload and download.
+- Initial release
@ -0,0 +1,300 @@
import * as http from 'http'
import * as net from 'net'
import {HttpClient} from '@actions/http-client'
import * as config from '../src/internal/shared/config'
import {internalArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
import {noopLogs} from './common'
import {NetworkError, UsageError} from '../src/internal/shared/errors'

jest.mock('@actions/http-client')

const clientOptions = {
  maxAttempts: 5,
  retryIntervalMs: 1,
  retryMultiplier: 1.5
}
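// These options imply an exponential backoff schedule: 5 attempts with a
// 1ms base interval and a 1.5 multiplier, i.e. waits of roughly 1, 1.5,
// 2.25 and 3.375ms between attempts. A minimal sketch of how such options
// are presumably consumed (an assumption for illustration only; the real
// retry logic lives in src/internal/shared/artifact-twirp-client.ts and
// also distinguishes retryable from non-retryable status codes):
//
// async function withRetry<T>(call: () => Promise<T>): Promise<T> {
//   let delay = clientOptions.retryIntervalMs
//   for (let attempt = 1; ; attempt++) {
//     try {
//       return await call()
//     } catch (error) {
//       if (attempt >= clientOptions.maxAttempts) throw error
//       await new Promise(resolve => setTimeout(resolve, delay))
//       delay *= clientOptions.retryMultiplier
//     }
//   }
// }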
describe('artifact-http-client', () => {
  beforeAll(() => {
    noopLogs()
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('http://localhost:8080')
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('token')
  })

  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('should successfully create a client', () => {
    const client = internalArtifactTwirpClient()
    expect(client).toBeDefined()
  })

  it('should make a request', async () => {
    const mockPost = jest.fn(() => {
      const msg = new http.IncomingMessage(new net.Socket())
      msg.statusCode = 200
      return {
        message: msg,
        readBody: async () => {
          return Promise.resolve(
            `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
          )
        }
      }
    })
    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })

    const client = internalArtifactTwirpClient()
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
      name: 'artifact',
      version: 4
    })

    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
    expect(artifact).toBeDefined()
    expect(artifact.ok).toBe(true)
    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
  })

  it('should retry if the request fails', async () => {
    const mockPost = jest
      .fn(() => {
        const msgSucceeded = new http.IncomingMessage(new net.Socket())
        msgSucceeded.statusCode = 200
        return {
          message: msgSucceeded,
          readBody: async () => {
            return Promise.resolve(
              `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
            )
          }
        }
      })
      .mockImplementationOnce(() => {
        const msgFailed = new http.IncomingMessage(new net.Socket())
        msgFailed.statusCode = 500
        msgFailed.statusMessage = 'Internal Server Error'
        return {
          message: msgFailed,
          readBody: async () => {
            return Promise.resolve(`{"ok": false}`)
          }
        }
      })
    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })

    const client = internalArtifactTwirpClient(clientOptions)
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
      name: 'artifact',
      version: 4
    })

    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(artifact).toBeDefined()
    expect(artifact.ok).toBe(true)
    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
    expect(mockPost).toHaveBeenCalledTimes(2)
  })

  it('should fail if the request fails 5 times', async () => {
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 500
      msgFailed.statusMessage = 'Internal Server Error'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(`{"ok": false}`)
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(
      'Failed to make request after 5 attempts: Failed request: (500) Internal Server Error'
    )
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(5)
  })

  it('should fail immediately if there is a non-retryable error', async () => {
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 401
      msgFailed.statusMessage = 'Unauthorized'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(`{"ok": false}`)
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(
      'Received non-retryable error: Failed request: (401) Unauthorized'
    )
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

  it('should fail with a descriptive error', async () => {
    // 409 duplicate error
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 409
      msgFailed.statusMessage = 'Conflict'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(
            `{"msg": "an artifact with this name already exists on the workflow run"}`
          )
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(
      'Failed to CreateArtifact: Received non-retryable error: Failed request: (409) Conflict: an artifact with this name already exists on the workflow run'
    )
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

  it('should properly describe a network failure', async () => {
    class FakeNodeError extends Error {
      code: string
      constructor(code: string) {
        super()
        this.code = code
      }
    }

    const mockPost = jest.fn(() => {
      throw new FakeNodeError('ENOTFOUND')
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient()
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(new NetworkError('ENOTFOUND').message)
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

  it('should properly describe a usage error', async () => {
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 403
      msgFailed.statusMessage = 'Forbidden'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(
            `{"msg": "insufficient usage to create artifact"}`
          )
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient()
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(new UsageError().message)
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })
})
@ -1,5 +0,0 @@
name: 'Set env variables'
description: 'Sets certain env variables so that e2e artifact upload and download can be tested in a shell'
runs:
  using: 'node12'
  main: 'index.js'
@ -1,14 +0,0 @@
// Certain env variables are not set by default in a shell context and are only available in a node context from a running action
// In order to be able to upload and download artifacts e2e in a shell when running CI tests, we need these env variables set
const fs = require('fs');
const os = require('os');
const filePath = process.env[`GITHUB_ENV`]
fs.appendFileSync(filePath, `ACTIONS_RUNTIME_URL=${process.env.ACTIONS_RUNTIME_URL}${os.EOL}`, {
  encoding: 'utf8'
})
fs.appendFileSync(filePath, `ACTIONS_RUNTIME_TOKEN=${process.env.ACTIONS_RUNTIME_TOKEN}${os.EOL}`, {
  encoding: 'utf8'
})
fs.appendFileSync(filePath, `GITHUB_RUN_ID=${process.env.GITHUB_RUN_ID}${os.EOL}`, {
  encoding: 'utf8'
})
@ -0,0 +1,9 @@
import * as core from '@actions/core'

// noopLogs mocks the console.log and core.* functions to prevent output in the console while testing
export const noopLogs = (): void => {
  jest.spyOn(console, 'log').mockImplementation(() => {})
  jest.spyOn(core, 'debug').mockImplementation(() => {})
  jest.spyOn(core, 'info').mockImplementation(() => {})
  jest.spyOn(core, 'warning').mockImplementation(() => {})
}
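A typical usage sketch for this helper (an illustration, not code from this diff): call it once per suite and let Jest restore the spies afterwards.

  beforeAll(() => {
    noopLogs()
  })
  afterAll(() => {
    jest.restoreAllMocks()
  })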
@ -0,0 +1,27 @@
import * as config from '../src/internal/shared/config'

beforeEach(() => {
  jest.resetModules()
})

describe('isGhes', () => {
  it('should return false when the request domain is github.com', () => {
    process.env.GITHUB_SERVER_URL = 'https://github.com'
    expect(config.isGhes()).toBe(false)
  })

  it('should return false when the request domain ends with ghe.com', () => {
    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.com'
    expect(config.isGhes()).toBe(false)
  })

  it('should return false when the request domain ends with ghe.localhost', () => {
    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
    expect(config.isGhes()).toBe(false)
  })

  it('should return true when the request domain is specific to an enterprise', () => {
    process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
    expect(config.isGhes()).toBe(true)
  })
})
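For reference, a minimal `isGhes` sketch consistent with the expectations above (an assumption for illustration; the shipped implementation lives in `src/internal/shared/config.ts`): github.com and the hosted `.ghe.com` / `.ghe.localhost` domains are not GHES, anything else is.

  export function isGhes(): boolean {
    // Default to github.com when the variable is unset, as on a hosted runner
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com')
    const hostname = ghUrl.hostname.toUpperCase()
    const isGitHubHost = hostname === 'GITHUB.COM'
    const isGheHost =
      hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
    return !isGitHubHost && !isGheHost
  }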
@ -0,0 +1,192 @@
import * as github from '@actions/github'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import type {RequestInterface} from '@octokit/types'
import {
  deleteArtifactInternal,
  deleteArtifactPublic
} from '../src/internal/delete/delete-artifact'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'

type MockedRequest = jest.MockedFunction<RequestInterface<object>>

type MockedDeleteArtifact = jest.MockedFunction<
  RestEndpointMethods['actions']['deleteArtifact']
>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    request: jest.fn(),
    rest: {
      actions: {
        deleteArtifact: jest.fn()
      }
    }
  })
}))

const fixtures = {
  repo: 'toolkit',
  owner: 'actions',
  token: 'ghp_1234567890',
  runId: 123,
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  },
  artifacts: [
    {
      id: 1,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-01')
    },
    {
      id: 2,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-02')
    }
  ]
}

describe('delete-artifact', () => {
  beforeAll(() => {
    noopLogs()
  })

  describe('public', () => {
    it('should delete an artifact', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: [
            {
              name: fixtures.artifacts[0].name,
              id: fixtures.artifacts[0].id,
              size_in_bytes: fixtures.artifacts[0].size,
              created_at: fixtures.artifacts[0].createdAt.toISOString()
            }
          ]
        }
      })

      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
        .deleteArtifact as MockedDeleteArtifact
      mockDeleteArtifact.mockResolvedValueOnce({
        status: 204,
        headers: {},
        url: '',
        data: null as never
      })

      const response = await deleteArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      expect(response).toEqual({
        id: fixtures.artifacts[0].id
      })
    })

    it('should fail if non-200 response', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: [
            {
              name: fixtures.artifacts[0].name,
              id: fixtures.artifacts[0].id,
              size_in_bytes: fixtures.artifacts[0].size,
              created_at: fixtures.artifacts[0].createdAt.toISOString()
            }
          ]
        }
      })

      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
        .deleteArtifact as MockedDeleteArtifact
      mockDeleteArtifact.mockRejectedValue(new Error('boom'))

      await expect(
        deleteArtifactPublic(
          fixtures.artifacts[0].name,
          fixtures.runId,
          fixtures.owner,
          fixtures.repo,
          fixtures.token
        )
      ).rejects.toThrow('boom')
    })
  })

  describe('internal', () => {
    beforeEach(() => {
      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)
      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })

    it('should delete an artifact', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
        .mockResolvedValue({
          ok: true,
          artifactId: fixtures.artifacts[0].id.toString()
        })
      const response = await deleteArtifactInternal(fixtures.artifacts[0].name)
      expect(response).toEqual({
        id: fixtures.artifacts[0].id
      })
    })

    it('should fail if non-200 response', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
        .mockRejectedValue(new Error('boom'))
      await expect(
        deleteArtifactInternal(fixtures.artifacts[0].id)
      ).rejects.toThrow('boom')
    })
  })
})
@ -0,0 +1,614 @@
import fs from 'fs'
import * as http from 'http'
import * as net from 'net'
import * as path from 'path'
import * as github from '@actions/github'
import {HttpClient} from '@actions/http-client'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import archiver from 'archiver'

import {
  downloadArtifactInternal,
  downloadArtifactPublic,
  streamExtractExternal
} from '../src/internal/download/download-artifact'
import {getUserAgentString} from '../src/internal/shared/user-agent'
import {noopLogs} from './common'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as util from '../src/internal/shared/util'

type MockedDownloadArtifact = jest.MockedFunction<
  RestEndpointMethods['actions']['downloadArtifact']
>

const testDir = path.join(__dirname, '_temp', 'download-artifact')
const fixtures = {
  workspaceDir: path.join(testDir, 'workspace'),
  exampleArtifact: {
    path: path.join(testDir, 'artifact.zip'),
    files: [
      {
        path: 'hello.txt',
        content: 'Hello World!'
      },
      {
        path: 'goodbye.txt',
        content: 'Goodbye World!'
      }
    ]
  },
  artifactID: 1234,
  artifactName: 'my-artifact',
  artifactSize: 123456,
  repositoryOwner: 'actions',
  repositoryName: 'toolkit',
  token: 'ghp_1234567890',
  blobStorageUrl: 'https://blob-storage.local?signed=true',
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  }
}

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    rest: {
      actions: {
        downloadArtifact: jest.fn()
      }
    }
  })
}))

jest.mock('@actions/http-client')

// Create a zip archive with the contents of the example artifact
const createTestArchive = async (): Promise<void> => {
  const archive = archiver('zip', {
    zlib: {level: 9}
  })
  for (const file of fixtures.exampleArtifact.files) {
    archive.append(file.content, {name: file.path})
  }
  archive.finalize()

  return new Promise((resolve, reject) => {
    archive.pipe(fs.createWriteStream(fixtures.exampleArtifact.path))
    archive.on('error', reject)
    archive.on('finish', resolve)
  })
}

const expectExtractedArchive = async (dir: string): Promise<void> => {
  for (const file of fixtures.exampleArtifact.files) {
    const filePath = path.join(dir, file.path)
    expect(fs.readFileSync(filePath, 'utf8')).toEqual(file.content)
  }
}

const setup = async (): Promise<void> => {
  noopLogs()
  await fs.promises.mkdir(testDir, {recursive: true})
  await createTestArchive()

  process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
}

const cleanup = async (): Promise<void> => {
  jest.restoreAllMocks()
  await fs.promises.rm(testDir, {recursive: true})
  delete process.env['GITHUB_WORKSPACE']
}

const mockGetArtifactSuccess = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 200
  message.push(fs.readFileSync(fixtures.exampleArtifact.path))
  message.push(null)
  return {
    message
  }
})

const mockGetArtifactFailure = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 500
  message.push('Internal Server Error')
  message.push(null)
  return {
    message
  }
})

const mockGetArtifactMalicious = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 200
  message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains files that are formatted x/../../etc/hosts
  message.push(null)
  return {
    message
  }
})
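// The extraction path is expected to reject entries like x/../../etc/hosts.
// A sketch of the kind of guard involved (a hypothetical helper for
// illustration, not the code under test): resolve every zip entry against
// the destination directory and require that it stays inside it.
//
// function assertSafeEntry(destDir: string, entryName: string): void {
//   const resolved = path.resolve(destDir, entryName)
//   if (!resolved.startsWith(path.resolve(destDir) + path.sep)) {
//     throw new Error(`Blocked path traversal attempt: ${entryName}`)
//   }
// }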
describe('download-artifact', () => {
  describe('public', () => {
    beforeEach(setup)
    afterEach(cleanup)

    it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      await expectExtractedArchive(fixtures.workspaceDir)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    })

    it('should not allow path traversal from malicious artifacts', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactMalicious
          }
        }
      )

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })

      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
    })

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token,
        {
          path: customPath
        }
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      await expectExtractedArchive(customPath)
      expect(response.downloadPath).toBe(customPath)
    })

    it('should fail if download artifact API does not respond with location', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {},
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
    })

    it('should fail if blob storage chunk does not respond within 30s', async () => {
      // mock http client to delay response data by 30s
      const msg = new http.IncomingMessage(new net.Socket())
      msg.statusCode = 200

      const mockGet = jest.fn(async () => {
        return new Promise((resolve, reject) => {
          // Resolve with a 200 status code immediately
          resolve({
            message: msg,
            readBody: async () => {
              return Promise.resolve(`{"ok": true}`)
            }
          })

          // Reject with an error after 31 seconds
          setTimeout(() => {
            reject(new Error('Request timeout'))
          }, 31000) // Timeout after 31 seconds
        })
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGet
          }
        }
      )

      await expect(
        streamExtractExternal(fixtures.blobStorageUrl, fixtures.workspaceDir)
      ).rejects.toBeInstanceOf(Error)

      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
    }, 35000) // add longer timeout to allow for timer to run out
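    // The 30s limit exercised above behaves like an inactivity timeout on the
    // response stream, not a cap on total download time. A sketch of the
    // pattern (an assumption for illustration; the code under test lives in
    // src/internal/download/download-artifact.ts):
    //
    // function watchForInactivity(res: NodeJS.ReadableStream, ms: number): void {
    //   const timer = setTimeout(
    //     () => res.emit('error', new Error(`Blob storage chunk did not respond in ${ms}ms`)),
    //     ms
    //   )
    //   // Every data event pushes the deadline out again
    //   res.on('data', () => timer.refresh())
    //   res.on('end', () => clearTimeout(timer))
    // }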
    it('should fail if blob storage response is non-200 after 5 retries', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactFailure
          }
        }
      )

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactFailure).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactFailure).toHaveBeenCalledTimes(5)
    }, 38000)

    it('should retry if blob storage response is non-200 and then succeed with a 200', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockGetArtifact = jest
        .fn(mockGetArtifactSuccess)
        .mockImplementationOnce(mockGetArtifactFailure)

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifact
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactFailure).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactFailure).toHaveBeenCalledTimes(1)
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactSuccess).toHaveBeenCalledTimes(1)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    }, 28000)
  })

  describe('internal', () => {
    beforeEach(async () => {
      await setup()

      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)

      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })
    afterEach(async () => {
      await cleanup()
    })

    it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactInternal(fixtures.artifactID)

      await expectExtractedArchive(fixtures.workspaceDir)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactInternal(fixtures.artifactID, {
        path: customPath
      })

      await expectExtractedArchive(customPath)
      expect(response.downloadPath).toBe(customPath)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })

    it('should fail if download artifact API does not respond with location', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockRejectedValue(new Error('boom'))

      await expect(
        downloadArtifactInternal(fixtures.artifactID)
      ).rejects.toBeInstanceOf(Error)
    })

    it('should fail if blob storage response is non-200', async () => {
      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactFailure
          }
        }
      )

      await expect(
        downloadArtifactInternal(fixtures.artifactID)
      ).rejects.toBeInstanceOf(Error)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })
  })
})
@ -1,552 +0,0 @@
|
|||
import * as path from 'path'
|
||||
import * as core from '@actions/core'
|
||||
import {URL} from 'url'
|
||||
import {getDownloadSpecification} from '../src/internal/download-specification'
|
||||
import {ContainerEntry} from '../src/internal/contracts'
|
||||
|
||||
const artifact1Name = 'my-artifact'
|
||||
const artifact2Name = 'my-artifact-extra'
|
||||
|
||||
// Populating with only the information that is necessary
|
||||
function getPartialContainerEntry(): ContainerEntry {
|
||||
return {
|
||||
containerId: 10,
|
||||
scopeIdentifier: '00000000-0000-0000-0000-000000000000',
|
||||
path: 'ADD_INFORMATION',
|
||||
itemType: 'ADD_INFORMATION',
|
||||
status: 'created',
|
||||
dateCreated: '2020-02-06T22:13:35.373Z',
|
||||
dateLastModified: '2020-02-06T22:13:35.453Z',
|
||||
createdBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
|
||||
lastModifiedBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
|
||||
itemLocation: 'ADD_INFORMATION',
|
||||
contentLocation: 'ADD_INFORMATION',
|
||||
contentId: '',
|
||||
fileLength: 100
|
||||
}
|
||||
}
|
||||
|
||||
function createFileEntry(entryPath: string): ContainerEntry {
|
||||
const newFileEntry = getPartialContainerEntry()
|
||||
newFileEntry.path = entryPath
|
||||
newFileEntry.itemType = 'file'
|
||||
newFileEntry.itemLocation = createItemLocation(entryPath)
|
||||
newFileEntry.contentLocation = createContentLocation(entryPath)
|
||||
return newFileEntry
|
||||
}
|
||||
|
||||
function createDirectoryEntry(directoryPath: string): ContainerEntry {
|
||||
const newDirectoryEntry = getPartialContainerEntry()
|
||||
newDirectoryEntry.path = directoryPath
|
||||
newDirectoryEntry.itemType = 'folder'
|
||||
newDirectoryEntry.itemLocation = createItemLocation(directoryPath)
|
||||
newDirectoryEntry.contentLocation = createContentLocation(directoryPath)
|
||||
return newDirectoryEntry
|
||||
}
|
||||
|
||||
function createItemLocation(relativePath: string): string {
|
||||
const itemLocation = new URL(
|
||||
'https://testing/_apis/resources/Containers/10000'
|
||||
)
|
||||
itemLocation.searchParams.append('itemPath', relativePath)
|
||||
itemLocation.searchParams.append('metadata', 'true')
|
||||
return itemLocation.toString()
|
||||
}
|
||||
|
||||
function createContentLocation(relativePath: string): string {
|
||||
const itemLocation = new URL(
|
||||
'https://testing/_apis/resources/Containers/10000'
|
||||
)
|
||||
itemLocation.searchParams.append('itemPath', relativePath)
|
||||
return itemLocation.toString()
|
||||
}
|
||||
|
||||
/*
|
||||
Represents a set of container entries for two artifacts with the following directory structure
|
||||
|
||||
/my-artifact
|
||||
/file1.txt
|
||||
/file2.txt
|
||||
/dir1
|
||||
/file3.txt
|
||||
/dir2
|
||||
/dir3
|
||||
/dir4
|
||||
file4.txt
|
||||
file5.txt (no length property)
|
||||
file6.txt (empty file)
|
||||
/my-artifact-extra
|
||||
/file1.txt
|
||||
*/
|
||||
|
||||
// main artifact
|
||||
const file1Path = path.join(artifact1Name, 'file1.txt')
|
||||
const file2Path = path.join(artifact1Name, 'file2.txt')
|
||||
const dir1Path = path.join(artifact1Name, 'dir1')
|
||||
const file3Path = path.join(dir1Path, 'file3.txt')
|
||||
const dir2Path = path.join(dir1Path, 'dir2')
|
||||
const dir3Path = path.join(dir2Path, 'dir3')
|
||||
const dir4Path = path.join(dir3Path, 'dir4')
|
||||
const file4Path = path.join(dir4Path, 'file4.txt')
|
||||
const file5Path = path.join(dir4Path, 'file5.txt')
|
||||
const file6Path = path.join(dir4Path, 'file6.txt')
|
||||
|
||||
const rootDirectoryEntry = createDirectoryEntry(artifact1Name)
|
||||
const directoryEntry1 = createDirectoryEntry(dir1Path)
|
||||
const directoryEntry2 = createDirectoryEntry(dir2Path)
|
||||
const directoryEntry3 = createDirectoryEntry(dir3Path)
|
||||
const directoryEntry4 = createDirectoryEntry(dir4Path)
|
||||
const fileEntry1 = createFileEntry(file1Path)
|
||||
const fileEntry2 = createFileEntry(file2Path)
|
||||
const fileEntry3 = createFileEntry(file3Path)
|
||||
const fileEntry4 = createFileEntry(file4Path)
|
||||
|
||||
const missingLengthFileEntry = createFileEntry(file5Path)
|
||||
missingLengthFileEntry.fileLength = undefined // one file does not have a fileLength
|
||||
const emptyLengthFileEntry = createFileEntry(file6Path)
|
||||
emptyLengthFileEntry.fileLength = 0 // empty file path
|
||||
|
||||
// extra artifact
|
||||
const artifact2File1Path = path.join(artifact2Name, 'file1.txt')
|
||||
const rootDirectoryEntry2 = createDirectoryEntry(artifact2Name)
|
||||
const extraFileEntry = createFileEntry(artifact2File1Path)
|
||||
|
||||
const artifactContainerEntries: ContainerEntry[] = [
|
||||
rootDirectoryEntry,
|
||||
fileEntry1,
|
||||
fileEntry2,
|
||||
directoryEntry1,
|
||||
fileEntry3,
|
||||
directoryEntry2,
|
||||
directoryEntry3,
|
||||
directoryEntry4,
|
||||
fileEntry4,
|
||||
missingLengthFileEntry,
|
||||
emptyLengthFileEntry,
|
||||
rootDirectoryEntry2,
|
||||
extraFileEntry
|
||||
]
|
||||
|
||||
describe('Search', () => {
|
||||
beforeAll(async () => {
|
||||
// mock all output so that there is less noise when running tests
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
})
|
||||
|
||||
it('Download Specification - Absolute Path with no root directory', () => {
|
||||
const testDownloadPath = path.join(
|
||||
__dirname,
|
||||
'some',
|
||||
'destination',
|
||||
'folder'
|
||||
)
|
||||
|
||||
const specification = getDownloadSpecification(
|
||||
artifact1Name,
|
||||
artifactContainerEntries,
|
||||
testDownloadPath,
|
||||
false
|
||||
)
|
||||
|
||||
expect(specification.rootDownloadLocation).toEqual(testDownloadPath)
|
||||
expect(specification.filesToDownload.length).toEqual(5)
|
||||
|
||||
const item1ExpectedTargetPath = path.join(testDownloadPath, 'file1.txt')
|
||||
const item2ExpectedTargetPath = path.join(testDownloadPath, 'file2.txt')
|
||||
const item3ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'file3.txt'
|
||||
)
|
||||
const item4ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file4.txt'
|
||||
)
|
||||
const item5ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file5.txt'
|
||||
)
|
||||
const item6ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file6.txt'
|
||||
)
|
||||
|
||||
const targetLocations = specification.filesToDownload.map(
|
||||
item => item.targetPath
|
||||
)
|
||||
expect(targetLocations).toContain(item1ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item2ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item3ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item4ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item5ExpectedTargetPath)
|
||||
|
||||
for (const downloadItem of specification.filesToDownload) {
|
||||
if (downloadItem.targetPath === item1ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file1Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item2ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file2Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item3ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file3Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item4ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file4Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item5ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file5Path)
|
||||
)
|
||||
} else {
|
||||
throw new Error('this should never be reached')
|
||||
}
|
||||
}
|
||||
|
||||
expect(specification.directoryStructure.length).toEqual(3)
|
||||
expect(specification.directoryStructure).toContain(testDownloadPath)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, 'dir1')
|
||||
)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, 'dir1', 'dir2', 'dir3', 'dir4')
|
||||
)
|
||||
|
||||
expect(specification.emptyFilesToCreate.length).toEqual(1)
|
||||
expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
|
||||
})
|
||||
|
||||
it('Download Specification - Relative Path with no root directory', () => {
|
||||
const testDownloadPath = path.join('some', 'destination', 'folder')
|
||||
|
||||
const specification = getDownloadSpecification(
|
||||
artifact1Name,
|
||||
artifactContainerEntries,
|
||||
testDownloadPath,
|
||||
false
|
||||
)
|
||||
|
||||
expect(specification.rootDownloadLocation).toEqual(testDownloadPath)
|
||||
expect(specification.filesToDownload.length).toEqual(5)
|
||||
|
||||
const item1ExpectedTargetPath = path.join(testDownloadPath, 'file1.txt')
|
||||
const item2ExpectedTargetPath = path.join(testDownloadPath, 'file2.txt')
|
||||
const item3ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'file3.txt'
|
||||
)
|
||||
const item4ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file4.txt'
|
||||
)
|
||||
const item5ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file5.txt'
|
||||
)
|
||||
const item6ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file6.txt'
|
||||
)
|
||||
|
||||
const targetLocations = specification.filesToDownload.map(
|
||||
item => item.targetPath
|
||||
)
|
||||
expect(targetLocations).toContain(item1ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item2ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item3ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item4ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item5ExpectedTargetPath)
|
||||
|
||||
for (const downloadItem of specification.filesToDownload) {
|
||||
if (downloadItem.targetPath === item1ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file1Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item2ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file2Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item3ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file3Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item4ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file4Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item5ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file5Path)
|
||||
)
|
||||
} else {
|
||||
throw new Error('this should never be reached')
|
||||
}
|
||||
}
|
||||
|
||||
expect(specification.directoryStructure.length).toEqual(3)
|
||||
expect(specification.directoryStructure).toContain(testDownloadPath)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, 'dir1')
|
||||
)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, 'dir1', 'dir2', 'dir3', 'dir4')
|
||||
)
|
||||
|
||||
expect(specification.emptyFilesToCreate.length).toEqual(1)
|
||||
expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
|
||||
})
|
||||
|
||||
it('Download Specification - Absolute Path with root directory', () => {
|
||||
const testDownloadPath = path.join(
|
||||
__dirname,
|
||||
'some',
|
||||
'destination',
|
||||
'folder'
|
||||
)
|
||||
|
||||
const specification = getDownloadSpecification(
|
||||
artifact1Name,
|
||||
artifactContainerEntries,
|
||||
testDownloadPath,
|
||||
true
|
||||
)
|
||||
|
||||
expect(specification.rootDownloadLocation).toEqual(
|
||||
path.join(testDownloadPath, artifact1Name)
|
||||
)
|
||||
expect(specification.filesToDownload.length).toEqual(5)
|
||||
|
||||
const item1ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'file1.txt'
|
||||
)
|
||||
const item2ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'file2.txt'
|
||||
)
|
||||
const item3ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'file3.txt'
|
||||
)
|
||||
const item4ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file4.txt'
|
||||
)
|
||||
const item5ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file5.txt'
|
||||
)
|
||||
const item6ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file6.txt'
|
||||
)
|
||||
|
||||
const targetLocations = specification.filesToDownload.map(
|
||||
item => item.targetPath
|
||||
)
|
||||
expect(targetLocations).toContain(item1ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item2ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item3ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item4ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item5ExpectedTargetPath)
|
||||
|
||||
for (const downloadItem of specification.filesToDownload) {
|
||||
if (downloadItem.targetPath === item1ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file1Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item2ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file2Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item3ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file3Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item4ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file4Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item5ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file5Path)
|
||||
)
|
||||
} else {
|
||||
throw new Error('this should never be reached')
|
||||
}
|
||||
}
|
||||
|
||||
expect(specification.directoryStructure.length).toEqual(3)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, artifact1Name)
|
||||
)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, dir1Path)
|
||||
)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, dir4Path)
|
||||
)
|
||||
|
||||
expect(specification.emptyFilesToCreate.length).toEqual(1)
|
||||
expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
|
||||
})
|
||||
|
||||
it('Download Specification - Relative Path with root directory', () => {
|
||||
const testDownloadPath = path.join('some', 'destination', 'folder')
|
||||
|
||||
const specification = getDownloadSpecification(
|
||||
artifact1Name,
|
||||
artifactContainerEntries,
|
||||
testDownloadPath,
|
||||
true
|
||||
)
|
||||
|
||||
expect(specification.rootDownloadLocation).toEqual(
|
||||
path.join(testDownloadPath, artifact1Name)
|
||||
)
|
||||
expect(specification.filesToDownload.length).toEqual(5)
|
||||
|
||||
const item1ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'file1.txt'
|
||||
)
|
||||
const item2ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'file2.txt'
|
||||
)
|
||||
const item3ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'file3.txt'
|
||||
)
|
||||
const item4ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file4.txt'
|
||||
)
|
||||
const item5ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file5.txt'
|
||||
)
|
||||
const item6ExpectedTargetPath = path.join(
|
||||
testDownloadPath,
|
||||
artifact1Name,
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir3',
|
||||
'dir4',
|
||||
'file6.txt'
|
||||
)
|
||||
|
||||
const targetLocations = specification.filesToDownload.map(
|
||||
item => item.targetPath
|
||||
)
|
||||
expect(targetLocations).toContain(item1ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item2ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item3ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item4ExpectedTargetPath)
|
||||
expect(targetLocations).toContain(item5ExpectedTargetPath)
|
||||
|
||||
for (const downloadItem of specification.filesToDownload) {
|
||||
if (downloadItem.targetPath === item1ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file1Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item2ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file2Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item3ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file3Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item4ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file4Path)
|
||||
)
|
||||
} else if (downloadItem.targetPath === item5ExpectedTargetPath) {
|
||||
expect(downloadItem.sourceLocation).toEqual(
|
||||
createContentLocation(file5Path)
|
||||
)
|
||||
} else {
|
||||
throw new Error('this should never be reached')
|
||||
}
|
||||
}
|
||||
|
||||
expect(specification.directoryStructure.length).toEqual(3)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, artifact1Name)
|
||||
)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, dir1Path)
|
||||
)
|
||||
expect(specification.directoryStructure).toContain(
|
||||
path.join(testDownloadPath, dir4Path)
|
||||
)
|
||||
|
||||
expect(specification.emptyFilesToCreate.length).toEqual(1)
|
||||
expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
|
||||
})
|
||||
})
|
|
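// A hedged sketch (not the toolkit's implementation) of the classification the download
// specification tests above exercise: each container entry becomes a directory to create,
// an empty file to create, or a file to download. Field names are simplified assumptions.
interface ContainerEntryLike {
  path: string
  itemType: 'file' | 'folder'
  fileLength?: number
}
function classifyEntry(
  entry: ContainerEntryLike
): 'directoryStructure' | 'emptyFilesToCreate' | 'filesToDownload' {
  if (entry.itemType === 'folder') {
    return 'directoryStructure' // folders only contribute to the directory structure
  }
  // zero-length files are created directly instead of being downloaded
  return entry.fileLength === 0 ? 'emptyFilesToCreate' : 'filesToDownload'
}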
@ -1,490 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import * as http from 'http'
|
||||
import * as io from '../../io/src/io'
|
||||
import * as net from 'net'
|
||||
import * as path from 'path'
|
||||
import * as configVariables from '../src/internal/config-variables'
|
||||
import {promises as fs} from 'fs'
|
||||
import {DownloadItem} from '../src/internal/download-specification'
|
||||
import {HttpClient, HttpClientResponse} from '@actions/http-client'
|
||||
import {DownloadHttpClient} from '../src/internal/download-http-client'
|
||||
import {
|
||||
ListArtifactsResponse,
|
||||
QueryArtifactResponse
|
||||
} from '../src/internal/contracts'
|
||||
import * as stream from 'stream'
|
||||
import {gzip} from 'zlib'
|
||||
import {promisify} from 'util'
|
||||
|
||||
const root = path.join(__dirname, '_temp', 'artifact-download-tests')
|
||||
const defaultEncoding = 'utf8'
|
||||
|
||||
jest.mock('../src/internal/config-variables')
|
||||
jest.mock('@actions/http-client')
|
||||
|
||||
describe('Download Tests', () => {
|
||||
beforeAll(async () => {
|
||||
await io.rmRF(root)
|
||||
await fs.mkdir(path.join(root), {
|
||||
recursive: true
|
||||
})
|
||||
|
||||
// mock all output so that there is less noise when running tests
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'error').mockImplementation(() => {})
|
||||
})
|
||||
|
||||
/**
|
||||
* Test Listing Artifacts
|
||||
*/
|
||||
it('List Artifacts - Success', async () => {
|
||||
setupSuccessfulListArtifactsResponse()
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
const artifacts = await downloadHttpClient.listArtifacts()
|
||||
expect(artifacts.count).toEqual(2)
|
||||
|
||||
const artifactNames = artifacts.value.map(item => item.name)
|
||||
expect(artifactNames).toContain('artifact1-name')
|
||||
expect(artifactNames).toContain('artifact2-name')
|
||||
|
||||
for (const artifact of artifacts.value) {
|
||||
if (artifact.name === 'artifact1-name') {
|
||||
expect(artifact.url).toEqual(
|
||||
`${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact1-name`
|
||||
)
|
||||
} else if (artifact.name === 'artifact2-name') {
|
||||
expect(artifact.url).toEqual(
|
||||
`${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact2-name`
|
||||
)
|
||||
} else {
|
||||
throw new Error(
|
||||
'Invalid artifact combination. This should never be reached'
|
||||
)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('List Artifacts - Failure', async () => {
|
||||
setupFailedResponse()
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
await expect(downloadHttpClient.listArtifacts()).rejects.toThrow(
|
||||
'List Artifacts failed: Artifact service responded with 400'
|
||||
)
|
||||
})
|
||||
|
||||
/**
|
||||
* Test Container Items
|
||||
*/
|
||||
it('Container Items - Success', async () => {
|
||||
setupSuccessfulContainerItemsResponse()
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
const response = await downloadHttpClient.getContainerItems(
|
||||
'artifact-name',
|
||||
configVariables.getRuntimeUrl()
|
||||
)
|
||||
expect(response.count).toEqual(2)
|
||||
|
||||
const itemPaths = response.value.map(item => item.path)
|
||||
expect(itemPaths).toContain('artifact-name')
|
||||
expect(itemPaths).toContain('artifact-name/file1.txt')
|
||||
|
||||
for (const containerEntry of response.value) {
|
||||
if (containerEntry.path === 'artifact-name') {
|
||||
expect(containerEntry.itemType).toEqual('folder')
|
||||
} else if (containerEntry.path === 'artifact-name/file1.txt') {
|
||||
expect(containerEntry.itemType).toEqual('file')
|
||||
} else {
|
||||
throw new Error(
|
||||
'Invalid container combination. This should never be reached'
|
||||
)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
it('Container Items - Failure', async () => {
|
||||
setupFailedResponse()
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
await expect(
|
||||
downloadHttpClient.getContainerItems(
|
||||
'artifact-name',
|
||||
configVariables.getRuntimeUrl()
|
||||
)
|
||||
).rejects.toThrow(
|
||||
`Get Container Items failed: Artifact service responded with 400`
|
||||
)
|
||||
})
|
||||
|
||||
it('Test downloading an individual artifact with gzip', async () => {
|
||||
const fileContents = Buffer.from(
|
||||
'gzip worked on the first try\n',
|
||||
defaultEncoding
|
||||
)
|
||||
const targetPath = path.join(root, 'FileA.txt')
|
||||
|
||||
setupDownloadItemResponse(fileContents, true, 200, false, false)
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
|
||||
const items: DownloadItem[] = []
|
||||
items.push({
|
||||
sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileA.txt`,
|
||||
targetPath
|
||||
})
|
||||
|
||||
await expect(
|
||||
downloadHttpClient.downloadSingleArtifact(items)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
await checkDestinationFile(targetPath, fileContents)
|
||||
})
|
||||
|
||||
it('Test downloading an individual artifact without gzip', async () => {
|
||||
const fileContents = Buffer.from(
|
||||
'plaintext worked on the first try\n',
|
||||
defaultEncoding
|
||||
)
|
||||
const targetPath = path.join(root, 'FileB.txt')
|
||||
|
||||
setupDownloadItemResponse(fileContents, false, 200, false, false)
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
|
||||
const items: DownloadItem[] = []
|
||||
items.push({
|
||||
sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileB.txt`,
|
||||
targetPath
|
||||
})
|
||||
|
||||
await expect(
|
||||
downloadHttpClient.downloadSingleArtifact(items)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
await checkDestinationFile(targetPath, fileContents)
|
||||
})
|
||||
|
||||
it('Test retryable status codes during artifact download', async () => {
|
||||
// The first HTTP response should return a retryable status code while the subsequent call should return a 200 so
|
||||
// the download should successfully finish
|
||||
const retryableStatusCodes = [429, 500, 502, 503, 504]
|
||||
for (const statusCode of retryableStatusCodes) {
|
||||
const fileContents = Buffer.from('try, try again\n', defaultEncoding)
|
||||
const targetPath = path.join(root, `FileC-${statusCode}.txt`)
|
||||
|
||||
setupDownloadItemResponse(fileContents, false, statusCode, false, true)
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
|
||||
const items: DownloadItem[] = []
|
||||
items.push({
|
||||
sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileC.txt`,
|
||||
targetPath
|
||||
})
|
||||
|
||||
await expect(
|
||||
downloadHttpClient.downloadSingleArtifact(items)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
await checkDestinationFile(targetPath, fileContents)
|
||||
}
|
||||
})
|
||||
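// A minimal sketch, assuming illustrative names, of the retry pattern this test exercises:
// repeat the GET while the status code is retryable, up to a fixed attempt budget. This is
// not the toolkit's actual retry implementation.
async function getWithRetry(
  client: HttpClient,
  url: string,
  maxAttempts = 2
): Promise<HttpClientResponse> {
  const retryableCodes = new Set([429, 500, 502, 503, 504])
  let response = await client.get(url)
  let attempts = 1
  while (
    retryableCodes.has(response.message.statusCode ?? 0) &&
    attempts < maxAttempts
  ) {
    response = await client.get(url) // retry until a non-retryable code or the budget runs out
    attempts++
  }
  return response
}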
|
||||
it('Test retry on truncated response with gzip', async () => {
|
||||
const fileContents = Buffer.from(
|
||||
'Sometimes gunzip fails on the first try\n',
|
||||
defaultEncoding
|
||||
)
|
||||
const targetPath = path.join(root, 'FileD.txt')
|
||||
|
||||
setupDownloadItemResponse(fileContents, true, 200, true, true)
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
|
||||
const items: DownloadItem[] = []
|
||||
items.push({
|
||||
sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileD.txt`,
|
||||
targetPath
|
||||
})
|
||||
|
||||
await expect(
|
||||
downloadHttpClient.downloadSingleArtifact(items)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
await checkDestinationFile(targetPath, fileContents)
|
||||
})
|
||||
|
||||
it('Test retry on truncated response without gzip', async () => {
|
||||
const fileContents = Buffer.from(
|
||||
'You have to inspect the content-length header to know if you got everything\n',
|
||||
defaultEncoding
|
||||
)
|
||||
const targetPath = path.join(root, 'FileE.txt')
|
||||
|
||||
setupDownloadItemResponse(fileContents, false, 200, true, true)
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
|
||||
const items: DownloadItem[] = []
|
||||
items.push({
|
||||
sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileD.txt`,
|
||||
targetPath
|
||||
})
|
||||
|
||||
await expect(
|
||||
downloadHttpClient.downloadSingleArtifact(items)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
await checkDestinationFile(targetPath, fileContents)
|
||||
})
|
||||
|
||||
/**
|
||||
* Helper used to set up mocking for the HttpClient
|
||||
*/
|
||||
async function emptyMockReadBody(): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
resolve('') // empty body; a Promise<string> cannot be resolved with undefined
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the HTTP GET response for a successful listArtifacts() call
|
||||
*/
|
||||
function setupSuccessfulListArtifactsResponse(): void {
|
||||
jest.spyOn(HttpClient.prototype, 'get').mockImplementationOnce(async () => {
|
||||
const mockMessage = new http.IncomingMessage(new net.Socket())
|
||||
let mockReadBody = emptyMockReadBody
|
||||
|
||||
mockMessage.statusCode = 201
|
||||
const response: ListArtifactsResponse = {
|
||||
count: 2,
|
||||
value: [
|
||||
{
|
||||
containerId: '13',
|
||||
size: -1,
|
||||
signedContent: 'false',
|
||||
fileContainerResourceUrl: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13`,
|
||||
type: 'actions_storage',
|
||||
name: 'artifact1-name',
|
||||
url: `${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact1-name`
|
||||
},
|
||||
{
|
||||
containerId: '13',
|
||||
size: -1,
|
||||
signedContent: 'false',
|
||||
fileContainerResourceUrl: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13`,
|
||||
type: 'actions_storage',
|
||||
name: 'artifact2-name',
|
||||
url: `${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact2-name`
|
||||
}
|
||||
]
|
||||
}
|
||||
const returnData: string = JSON.stringify(response, null, 2)
|
||||
mockReadBody = async function(): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
resolve(returnData)
|
||||
})
|
||||
}
|
||||
|
||||
return new Promise<HttpClientResponse>(resolve => {
|
||||
resolve({
|
||||
message: mockMessage,
|
||||
readBody: mockReadBody
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the HTTP GET response for downloading items
|
||||
* @param isGzip is the downloaded item gzip encoded
|
||||
* @param firstHttpResponseCode the http response code that should be returned
|
||||
*/
|
||||
function setupDownloadItemResponse(
|
||||
fileContents: Buffer,
|
||||
isGzip: boolean,
|
||||
firstHttpResponseCode: number,
|
||||
truncateFirstResponse: boolean,
|
||||
retryExpected: boolean
|
||||
): void {
|
||||
const spyInstance = jest
|
||||
.spyOn(HttpClient.prototype, 'get')
|
||||
.mockImplementationOnce(async () => {
|
||||
if (firstHttpResponseCode === 200) {
|
||||
const fullResponse = await constructResponse(isGzip, fileContents)
|
||||
const actualResponse = truncateFirstResponse
|
||||
? fullResponse.subarray(0, 3)
|
||||
: fullResponse
|
||||
|
||||
return {
|
||||
message: getDownloadResponseMessage(
|
||||
firstHttpResponseCode,
|
||||
isGzip,
|
||||
fullResponse.length,
|
||||
actualResponse
|
||||
),
|
||||
readBody: emptyMockReadBody
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
message: getDownloadResponseMessage(
|
||||
firstHttpResponseCode,
|
||||
false,
|
||||
0,
|
||||
null
|
||||
),
|
||||
readBody: emptyMockReadBody
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// set up a second mock only if we expect a retry. Otherwise this mock will affect other tests.
|
||||
if (retryExpected) {
|
||||
spyInstance.mockImplementationOnce(async () => {
|
||||
// chained response, if the HTTP GET function gets called again, return a successful response
|
||||
const fullResponse = await constructResponse(isGzip, fileContents)
|
||||
return {
|
||||
message: getDownloadResponseMessage(
|
||||
200,
|
||||
isGzip,
|
||||
fullResponse.length,
|
||||
fullResponse
|
||||
),
|
||||
readBody: emptyMockReadBody
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
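// Usage sketch (values are illustrative): jest consumes mockImplementationOnce callbacks
// in order, which is how setupDownloadItemResponse above chains a failing first response
// with a successful second one when retryExpected is true.
const fetchOnce = jest
  .fn()
  .mockImplementationOnce(async () => 503) // first call: a retryable failure
  .mockImplementationOnce(async () => 200) // second call (the retry): success
// await fetchOnce() === 503, then await fetchOnce() === 200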
|
||||
async function constructResponse(
|
||||
isGzip: boolean,
|
||||
plaintext: Buffer | string
|
||||
): Promise<Buffer> {
|
||||
if (isGzip) {
|
||||
return await promisify(gzip)(plaintext)
|
||||
} else if (typeof plaintext === 'string') {
|
||||
return Buffer.from(plaintext, defaultEncoding)
|
||||
} else {
|
||||
return plaintext
|
||||
}
|
||||
}
|
||||
|
||||
function getDownloadResponseMessage(
|
||||
httpResponseCode: number,
|
||||
isGzip: boolean,
|
||||
contentLength: number,
|
||||
response: Buffer | null
|
||||
): http.IncomingMessage {
|
||||
let readCallCount = 0
|
||||
const mockMessage = <http.IncomingMessage>new stream.Readable({
|
||||
read(size) {
|
||||
switch (readCallCount++) {
|
||||
case 0:
|
||||
if (!!response && response.byteLength > size) {
|
||||
throw new Error(
|
||||
`test response larger than requested size (${size})`
|
||||
)
|
||||
}
|
||||
this.push(response)
|
||||
break
|
||||
|
||||
default:
|
||||
// end the stream
|
||||
this.push(null)
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
mockMessage.statusCode = httpResponseCode
|
||||
mockMessage.headers = {
|
||||
'content-length': contentLength.toString()
|
||||
}
|
||||
|
||||
if (isGzip) {
|
||||
mockMessage.headers['content-encoding'] = 'gzip'
|
||||
}
|
||||
|
||||
return mockMessage
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the HTTP GET response when querying for container items
|
||||
*/
|
||||
function setupSuccessfulContainerItemsResponse(): void {
|
||||
jest.spyOn(HttpClient.prototype, 'get').mockImplementationOnce(async () => {
|
||||
const mockMessage = new http.IncomingMessage(new net.Socket())
|
||||
let mockReadBody = emptyMockReadBody
|
||||
|
||||
mockMessage.statusCode = 201
|
||||
const response: QueryArtifactResponse = {
|
||||
count: 2,
|
||||
value: [
|
||||
{
|
||||
containerId: 10000,
|
||||
scopeIdentifier: '00000000-0000-0000-0000-000000000000',
|
||||
path: 'artifact-name',
|
||||
itemType: 'folder',
|
||||
status: 'created',
|
||||
dateCreated: '2020-02-06T22:13:35.373Z',
|
||||
dateLastModified: '2020-02-06T22:13:35.453Z',
|
||||
createdBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
|
||||
lastModifiedBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
|
||||
itemLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name&metadata=True`,
|
||||
contentLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name`,
|
||||
contentId: ''
|
||||
},
|
||||
{
|
||||
containerId: 10000,
|
||||
scopeIdentifier: '00000000-0000-0000-0000-000000000000',
|
||||
path: 'artifact-name/file1.txt',
|
||||
itemType: 'file',
|
||||
status: 'created',
|
||||
dateCreated: '2020-02-06T22:13:35.373Z',
|
||||
dateLastModified: '2020-02-06T22:13:35.453Z',
|
||||
createdBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
|
||||
lastModifiedBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
|
||||
itemLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name%2Ffile1.txt&metadata=True`,
|
||||
contentLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name%2Ffile1.txt`,
|
||||
contentId: ''
|
||||
}
|
||||
]
|
||||
}
|
||||
const returnData: string = JSON.stringify(response, null, 2)
|
||||
mockReadBody = async function(): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
resolve(returnData)
|
||||
})
|
||||
}
|
||||
|
||||
return new Promise<HttpClientResponse>(resolve => {
|
||||
resolve({
|
||||
message: mockMessage,
|
||||
readBody: mockReadBody
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the HTTP GET response for a generic failed request
|
||||
*/
|
||||
function setupFailedResponse(): void {
|
||||
jest.spyOn(HttpClient.prototype, 'get').mockImplementationOnce(async () => {
|
||||
const mockMessage = new http.IncomingMessage(new net.Socket())
|
||||
mockMessage.statusCode = 400
|
||||
return new Promise<HttpClientResponse>(resolve => {
|
||||
resolve({
|
||||
message: mockMessage,
|
||||
readBody: emptyMockReadBody
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function checkDestinationFile(
|
||||
targetPath: string,
|
||||
expectedContents: Buffer
|
||||
): Promise<void> {
|
||||
const fileContents = await fs.readFile(targetPath)
|
||||
|
||||
expect(fileContents.byteLength).toEqual(expectedContents.byteLength)
|
||||
expect(fileContents.equals(expectedContents)).toBeTruthy()
|
||||
}
|
||||
})
|
|
@ -0,0 +1,239 @@
|
|||
import * as github from '@actions/github'
|
||||
import type {RequestInterface} from '@octokit/types'
|
||||
import {
|
||||
getArtifactInternal,
|
||||
getArtifactPublic
|
||||
} from '../src/internal/find/get-artifact'
|
||||
import * as config from '../src/internal/shared/config'
|
||||
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
|
||||
import * as util from '../src/internal/shared/util'
|
||||
import {noopLogs} from './common'
|
||||
import {
|
||||
ArtifactNotFoundError,
|
||||
InvalidResponseError
|
||||
} from '../src/internal/shared/errors'
|
||||
|
||||
type MockedRequest = jest.MockedFunction<RequestInterface<object>>
|
||||
|
||||
jest.mock('@actions/github', () => ({
|
||||
getOctokit: jest.fn().mockReturnValue({
|
||||
request: jest.fn()
|
||||
})
|
||||
}))
|
||||
|
||||
const fixtures = {
|
||||
repo: 'toolkit',
|
||||
owner: 'actions',
|
||||
token: 'ghp_1234567890',
|
||||
runId: 123,
|
||||
backendIds: {
|
||||
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
|
||||
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
|
||||
},
|
||||
artifacts: [
|
||||
{
|
||||
id: 1,
|
||||
name: 'my-artifact',
|
||||
size: 456,
|
||||
createdAt: new Date('2023-12-01')
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'my-artifact',
|
||||
size: 456,
|
||||
createdAt: new Date('2023-12-02')
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
describe('get-artifact', () => {
|
||||
beforeAll(() => {
|
||||
noopLogs()
|
||||
})
|
||||
|
||||
describe('public', () => {
|
||||
it('should return the artifact if it is found', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
artifacts: [
|
||||
{
|
||||
name: fixtures.artifacts[0].name,
|
||||
id: fixtures.artifacts[0].id,
|
||||
size_in_bytes: fixtures.artifacts[0].size,
|
||||
created_at: fixtures.artifacts[0].createdAt.toISOString()
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
const response = await getArtifactPublic(
|
||||
fixtures.artifacts[0].name,
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token
|
||||
)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifact: fixtures.artifacts[0]
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the latest artifact if multiple are found', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
artifacts: fixtures.artifacts.map(artifact => ({
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size_in_bytes: artifact.size,
|
||||
created_at: artifact.createdAt.toISOString()
|
||||
}))
|
||||
}
|
||||
})
|
||||
|
||||
const response = await getArtifactPublic(
|
||||
fixtures.artifacts[0].name,
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token
|
||||
)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifact: fixtures.artifacts[1]
|
||||
})
|
||||
})
|
||||
|
||||
it('should fail if no artifacts are found', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
artifacts: []
|
||||
}
|
||||
})
|
||||
|
||||
const response = getArtifactPublic(
|
||||
fixtures.artifacts[0].name,
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token
|
||||
)
|
||||
|
||||
await expect(response).rejects.toThrowError(ArtifactNotFoundError)
|
||||
})
|
||||
|
||||
it('should fail if non-200 response', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
status: 404,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {}
|
||||
})
|
||||
|
||||
const response = getArtifactPublic(
|
||||
fixtures.artifacts[0].name,
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token
|
||||
)
|
||||
|
||||
await expect(response).rejects.toThrowError(InvalidResponseError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('internal', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
|
||||
jest
|
||||
.spyOn(util, 'getBackendIdsFromToken')
|
||||
.mockReturnValue(fixtures.backendIds)
|
||||
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://results.local')
|
||||
})
|
||||
|
||||
it('should return the artifact if it is found', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: [
|
||||
{
|
||||
...fixtures.backendIds,
|
||||
databaseId: fixtures.artifacts[0].id.toString(),
|
||||
name: fixtures.artifacts[0].name,
|
||||
size: fixtures.artifacts[0].size.toString(),
|
||||
createdAt: Timestamp.fromDate(fixtures.artifacts[0].createdAt)
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
const response = await getArtifactInternal(fixtures.artifacts[0].name)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifact: fixtures.artifacts[0]
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the latest artifact if multiple are found', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: fixtures.artifacts.map(artifact => ({
|
||||
...fixtures.backendIds,
|
||||
databaseId: artifact.id.toString(),
|
||||
name: artifact.name,
|
||||
size: artifact.size.toString(),
|
||||
createdAt: Timestamp.fromDate(artifact.createdAt)
|
||||
}))
|
||||
})
|
||||
|
||||
const response = await getArtifactInternal(fixtures.artifacts[0].name)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifact: fixtures.artifacts[1]
|
||||
})
|
||||
})
|
||||
|
||||
it('should fail if no artifacts are found', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: []
|
||||
})
|
||||
|
||||
const response = getArtifactInternal(fixtures.artifacts[0].name)
|
||||
|
||||
await expect(response).rejects.toThrowError(ArtifactNotFoundError)
|
||||
})
|
||||
|
||||
it('should fail if non-200 response', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockRejectedValue(new Error('boom'))
|
||||
|
||||
const response = getArtifactInternal(fixtures.artifacts[0].name)
|
||||
|
||||
await expect(response).rejects.toThrow()
|
||||
})
|
||||
})
|
||||
})
|
|
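// A hedged sketch of the "latest" selection the tests above assert: when several artifacts
// share a name, the one with the greatest createdAt wins. The shape is simplified and the
// function name is an assumption, not the library's API.
interface ArtifactLike {
  id: number
  name: string
  createdAt: Date
}
function pickLatest(artifacts: ArtifactLike[]): ArtifactLike {
  // assumes a non-empty list; the real code raises ArtifactNotFoundError when empty
  return artifacts.reduce((latest, candidate) =>
    candidate.createdAt > latest.createdAt ? candidate : latest
  )
}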
@ -0,0 +1,242 @@
|
|||
import * as github from '@actions/github'
|
||||
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
|
||||
import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
|
||||
import {
|
||||
listArtifactsInternal,
|
||||
listArtifactsPublic
|
||||
} from '../src/internal/find/list-artifacts'
|
||||
import * as config from '../src/internal/shared/config'
|
||||
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
|
||||
import * as util from '../src/internal/shared/util'
|
||||
import {noopLogs} from './common'
|
||||
import {Artifact} from '../src/internal/shared/interfaces'
|
||||
|
||||
type MockedListWorkflowRunArtifacts = jest.MockedFunction<
|
||||
RestEndpointMethods['actions']['listWorkflowRunArtifacts']
|
||||
>
|
||||
|
||||
jest.mock('@actions/github', () => ({
|
||||
getOctokit: jest.fn().mockReturnValue({
|
||||
rest: {
|
||||
actions: {
|
||||
listWorkflowRunArtifacts: jest.fn()
|
||||
}
|
||||
}
|
||||
})
|
||||
}))
|
||||
|
||||
const artifactsToListResponse = (
|
||||
artifacts: Artifact[]
|
||||
): RestEndpointMethodTypes['actions']['listWorkflowRunArtifacts']['response']['data'] => {
|
||||
return {
|
||||
total_count: artifacts.length,
|
||||
artifacts: artifacts.map(artifact => ({
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size_in_bytes: artifact.size,
|
||||
created_at: artifact.createdAt?.toISOString() || '',
|
||||
run_id: fixtures.runId,
|
||||
// unused fields for tests
|
||||
url: '',
|
||||
archive_download_url: '',
|
||||
expired: false,
|
||||
expires_at: '',
|
||||
node_id: '',
|
||||
run_url: '',
|
||||
type: '',
|
||||
updated_at: ''
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
const fixtures = {
|
||||
repo: 'toolkit',
|
||||
owner: 'actions',
|
||||
token: 'ghp_1234567890',
|
||||
runId: 123,
|
||||
backendIds: {
|
||||
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
|
||||
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
|
||||
},
|
||||
artifacts: [
|
||||
{
|
||||
id: 1,
|
||||
name: 'my-artifact',
|
||||
size: 456,
|
||||
createdAt: new Date('2023-12-01')
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'my-artifact',
|
||||
size: 456,
|
||||
createdAt: new Date('2023-12-02')
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
describe('list-artifact', () => {
|
||||
beforeAll(() => {
|
||||
noopLogs()
|
||||
})
|
||||
|
||||
describe('public', () => {
|
||||
it('should return a list of artifacts', async () => {
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockListArtifacts.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: artifactsToListResponse(fixtures.artifacts)
|
||||
})
|
||||
|
||||
const response = await listArtifactsPublic(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
false
|
||||
)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifacts: fixtures.artifacts
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the latest artifact when latest is specified', async () => {
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockListArtifacts.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: artifactsToListResponse(fixtures.artifacts)
|
||||
})
|
||||
|
||||
const response = await listArtifactsPublic(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
true
|
||||
)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifacts: [fixtures.artifacts[1]]
|
||||
})
|
||||
})
|
||||
|
||||
it('can return empty artifacts', async () => {
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockListArtifacts.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
total_count: 0,
|
||||
artifacts: []
|
||||
}
|
||||
})
|
||||
|
||||
const response = await listArtifactsPublic(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
true
|
||||
)
|
||||
|
||||
expect(response).toEqual({
|
||||
artifacts: []
|
||||
})
|
||||
})
|
||||
|
||||
it('should fail if non-200 response', async () => {
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockListArtifacts.mockRejectedValue(new Error('boom'))
|
||||
|
||||
await expect(
|
||||
listArtifactsPublic(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
false
|
||||
)
|
||||
).rejects.toThrow('boom')
|
||||
})
|
||||
})
|
||||
|
||||
describe('internal', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
jest
|
||||
.spyOn(util, 'getBackendIdsFromToken')
|
||||
.mockReturnValue(fixtures.backendIds)
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://results.local')
|
||||
})
|
||||
|
||||
it('should return a list of artifacts', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: fixtures.artifacts.map(artifact => ({
|
||||
...fixtures.backendIds,
|
||||
databaseId: artifact.id.toString(),
|
||||
name: artifact.name,
|
||||
size: artifact.size.toString(),
|
||||
createdAt: Timestamp.fromDate(artifact.createdAt)
|
||||
}))
|
||||
})
|
||||
const response = await listArtifactsInternal(false)
|
||||
expect(response).toEqual({
|
||||
artifacts: fixtures.artifacts
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the latest artifact when latest is specified', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: fixtures.artifacts.map(artifact => ({
|
||||
...fixtures.backendIds,
|
||||
databaseId: artifact.id.toString(),
|
||||
name: artifact.name,
|
||||
size: artifact.size.toString(),
|
||||
createdAt: Timestamp.fromDate(artifact.createdAt)
|
||||
}))
|
||||
})
|
||||
const response = await listArtifactsInternal(true)
|
||||
expect(response).toEqual({
|
||||
artifacts: [fixtures.artifacts[1]]
|
||||
})
|
||||
})
|
||||
|
||||
it('can return empty artifacts', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: []
|
||||
})
|
||||
const response = await listArtifactsInternal(false)
|
||||
expect(response).toEqual({
|
||||
artifacts: []
|
||||
})
|
||||
})
|
||||
|
||||
it('should fail if non-200 response', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockRejectedValue(new Error('boom'))
|
||||
await expect(listArtifactsInternal(false)).rejects.toThrow('boom')
|
||||
})
|
||||
})
|
||||
})
|
|
@ -0,0 +1,75 @@
|
|||
import {
|
||||
validateArtifactName,
|
||||
validateFilePath
|
||||
} from '../src/internal/upload/path-and-artifact-name-validation'
|
||||
|
||||
import {noopLogs} from './common'
|
||||
|
||||
describe('Path and artifact name validation', () => {
|
||||
beforeAll(() => {
|
||||
noopLogs()
|
||||
})
|
||||
|
||||
it('Check Artifact Name for any invalid characters', () => {
|
||||
const invalidNames = [
|
||||
'my\\artifact',
|
||||
'my/artifact',
|
||||
'my"artifact',
|
||||
'my:artifact',
|
||||
'my<artifact',
|
||||
'my>artifact',
|
||||
'my|artifact',
|
||||
'my*artifact',
|
||||
'my?artifact',
|
||||
''
|
||||
]
|
||||
for (const invalidName of invalidNames) {
|
||||
expect(() => {
|
||||
validateArtifactName(invalidName)
|
||||
}).toThrow()
|
||||
}
|
||||
|
||||
const validNames = [
|
||||
'my-normal-artifact',
|
||||
'myNormalArtifact',
|
||||
'm¥ñðrmålÄr†ï£å¢†'
|
||||
]
|
||||
for (const validName of validNames) {
|
||||
expect(() => {
|
||||
validateArtifactName(validName)
|
||||
}).not.toThrow()
|
||||
}
|
||||
})
|
||||
|
||||
it('Check Artifact File Path for any invalid characters', () => {
|
||||
const invalidNames = [
|
||||
'some/invalid"artifact/path',
|
||||
'some/invalid:artifact/path',
|
||||
'some/invalid<artifact/path',
|
||||
'some/invalid>artifact/path',
|
||||
'some/invalid|artifact/path',
|
||||
'some/invalid*artifact/path',
|
||||
'some/invalid?artifact/path',
|
||||
'some/invalid\rartifact/path',
|
||||
'some/invalid\nartifact/path',
|
||||
'some/invalid\r\nartifact/path',
|
||||
''
|
||||
]
|
||||
for (const invalidName of invalidNames) {
|
||||
expect(() => {
|
||||
validateFilePath(invalidName)
|
||||
}).toThrow()
|
||||
}
|
||||
|
||||
const validNames = [
|
||||
'my/perfectly-normal/artifact-path',
|
||||
'my/perfectly\\Normal/Artifact-path',
|
||||
'm¥/ñðrmål/Är†ï£å¢†'
|
||||
]
|
||||
for (const validName of validNames) {
|
||||
expect(() => {
|
||||
validateFilePath(validName)
|
||||
}).not.toThrow()
|
||||
}
|
||||
})
|
||||
})
|
|
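// A hedged sketch of the validation these tests pin down: reject empty names and names
// containing characters that are invalid on common filesystems. Illustrative only, not
// the toolkit's actual implementation.
const invalidArtifactNameCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?']
function validateArtifactNameSketch(name: string): void {
  if (!name) {
    throw new Error('Artifact name cannot be empty')
  }
  for (const char of invalidArtifactNameCharacters) {
    if (name.includes(char)) {
      throw new Error(`Artifact name contains the invalid character: ${char}`)
    }
  }
}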
@ -0,0 +1,65 @@
|
|||
import {Timestamp} from '../src/generated'
|
||||
import * as retention from '../src/internal/upload/retention'
|
||||
|
||||
describe('retention', () => {
|
||||
beforeEach(() => {
|
||||
delete process.env['GITHUB_RETENTION_DAYS']
|
||||
})
|
||||
it('should return the inputted retention days if it is less than the max retention days', () => {
|
||||
// setup
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest.useFakeTimers().setSystemTime(mockDate)
|
||||
process.env['GITHUB_RETENTION_DAYS'] = '90'
|
||||
|
||||
const exp = retention.getExpiration(30)
|
||||
|
||||
expect(exp).toBeDefined()
|
||||
if (exp) {
|
||||
const expDate = Timestamp.toDate(exp)
|
||||
const expected = new Date()
|
||||
expected.setDate(expected.getDate() + 30)
|
||||
|
||||
expect(expDate).toEqual(expected)
|
||||
}
|
||||
})
|
||||
|
||||
it('should return the max retention days if the inputted retention days is greater than the max retention days', () => {
|
||||
// setup
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest.useFakeTimers().setSystemTime(mockDate)
|
||||
process.env['GITHUB_RETENTION_DAYS'] = '90'
|
||||
|
||||
const exp = retention.getExpiration(120)
|
||||
|
||||
expect(exp).toBeDefined()
|
||||
if (exp) {
|
||||
const expDate = Timestamp.toDate(exp) // we check whether exp is defined above
|
||||
const expected = new Date()
|
||||
expected.setDate(expected.getDate() + 90)
|
||||
|
||||
expect(expDate).toEqual(expected)
|
||||
}
|
||||
})
|
||||
|
||||
it('should return undefined if the inputted retention days is undefined', () => {
|
||||
const exp = retention.getExpiration()
|
||||
expect(exp).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return the inputted retention days if there is no max retention days', () => {
|
||||
// setup
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest.useFakeTimers().setSystemTime(mockDate)
|
||||
|
||||
const exp = retention.getExpiration(30)
|
||||
|
||||
expect(exp).toBeDefined()
|
||||
if (exp) {
|
||||
const expDate = Timestamp.toDate(exp) // we check whether exp is defined above
|
||||
const expected = new Date()
|
||||
expected.setDate(expected.getDate() + 30)
|
||||
|
||||
expect(expDate).toEqual(expected)
|
||||
}
|
||||
})
|
||||
})
|
|
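// A sketch of the capping behavior the retention tests describe, under the assumption
// that GITHUB_RETENTION_DAYS holds the repository maximum. Illustrative logic only.
function effectiveRetentionDays(requested?: number): number | undefined {
  if (requested === undefined) {
    return undefined // no input means no expiration is set
  }
  const max = Number(process.env['GITHUB_RETENTION_DAYS'])
  if (!isNaN(max) && max > 0 && requested > max) {
    return max // cap the requested retention at the repository maximum
  }
  return requested
}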
@ -1,114 +0,0 @@
|
|||
import * as http from 'http'
|
||||
import * as net from 'net'
|
||||
import * as core from '@actions/core'
|
||||
import * as configVariables from '../src/internal/config-variables'
|
||||
import {retry} from '../src/internal/requestUtils'
|
||||
import {IHttpClientResponse} from '@actions/http-client/interfaces'
|
||||
import {HttpClientResponse} from '@actions/http-client'
|
||||
|
||||
jest.mock('../src/internal/config-variables')
|
||||
|
||||
interface ITestResult {
|
||||
responseCode: number
|
||||
errorMessage: string | null
|
||||
}
|
||||
|
||||
async function testRetry(
|
||||
responseCodes: number[],
|
||||
expectedResult: ITestResult
|
||||
): Promise<void> {
|
||||
const reverse = responseCodes.reverse() // Reverse responses since we pop from end
|
||||
if (expectedResult.errorMessage) {
|
||||
// we expect some exception to be thrown
|
||||
await expect(
|
||||
retry(
|
||||
'test',
|
||||
async () => handleResponse(reverse.pop()),
|
||||
new Map(), // extra error message for any particular http codes
|
||||
configVariables.getRetryLimit()
|
||||
)
|
||||
).rejects.toThrow(expectedResult.errorMessage)
|
||||
} else {
|
||||
// we expect a correct status code to be returned
|
||||
const actualResult = await retry(
|
||||
'test',
|
||||
async () => handleResponse(reverse.pop()),
|
||||
new Map(), // extra error message for any particular http codes
|
||||
configVariables.getRetryLimit()
|
||||
)
|
||||
expect(actualResult.message.statusCode).toEqual(expectedResult.responseCode)
|
||||
}
|
||||
}
|
||||
|
||||
async function handleResponse(
|
||||
testResponseCode: number | undefined
|
||||
): Promise<IHttpClientResponse> {
|
||||
if (!testResponseCode) {
|
||||
throw new Error(
|
||||
'Test incorrectly set up. reverse.pop() was called too many times so not enough test response codes were supplied'
|
||||
)
|
||||
}
|
||||
|
||||
return setupSingleMockResponse(testResponseCode)
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
// mock all output so that there is less noise when running tests
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'error').mockImplementation(() => {})
|
||||
})
|
||||
|
||||
/**
|
||||
* Helpers used to set up mocking for the HttpClient
|
||||
*/
|
||||
async function emptyMockReadBody(): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
resolve('') // empty body; a Promise<string> cannot be resolved with undefined
|
||||
})
|
||||
}
|
||||
|
||||
async function setupSingleMockResponse(
|
||||
statusCode: number
|
||||
): Promise<IHttpClientResponse> {
|
||||
const mockMessage = new http.IncomingMessage(new net.Socket())
|
||||
const mockReadBody = emptyMockReadBody
|
||||
mockMessage.statusCode = statusCode
|
||||
return new Promise<HttpClientResponse>(resolve => {
|
||||
resolve({
|
||||
message: mockMessage,
|
||||
readBody: mockReadBody
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
test('retry works on successful response', async () => {
|
||||
await testRetry([200], {
|
||||
responseCode: 200,
|
||||
errorMessage: null
|
||||
})
|
||||
})
|
||||
|
||||
test('retry works after retryable status code', async () => {
|
||||
await testRetry([503, 200], {
|
||||
responseCode: 200,
|
||||
errorMessage: null
|
||||
})
|
||||
})
|
||||
|
||||
test('retry fails after exhausting retries', async () => {
|
||||
// __mocks__/config-variables caps the max retry count in tests to 2
|
||||
await testRetry([503, 503, 200], {
|
||||
responseCode: 200,
|
||||
errorMessage: 'test failed: Artifact service responded with 503'
|
||||
})
|
||||
})
|
||||
|
||||
test('retry fails after non-retryable status code', async () => {
|
||||
await testRetry([400, 200], {
|
||||
responseCode: 400,
|
||||
errorMessage: 'test failed: Artifact service responded with 400'
|
||||
})
|
||||
})
|
|
@ -1,25 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
path="$1"
|
||||
expectedContent="$2"
|
||||
|
||||
if [ "$path" == "" ]; then
|
||||
echo "File path not provided"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$expectedContent" == "" ]; then
|
||||
echo "Expected file contents not provided"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f "$path" ]; then
|
||||
echo "Expected file $path does not exist"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
actualContent=$(cat "$path")
|
||||
if [ "$actualContent" != "$expectedContent" ];then
|
||||
echo "File contents are not correct, expected $expectedContent, received $actualContent"
|
||||
exit 1
|
||||
fi
|
|
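# Usage sketch for the verification script above (the script filename is an assumption):
#   ./verify-file-content.sh /tmp/artifact/file1.txt "expected file contents"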
@ -0,0 +1,354 @@
|
|||
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
|
||||
import * as zip from '../src/internal/upload/zip'
|
||||
import * as util from '../src/internal/shared/util'
|
||||
import * as retention from '../src/internal/upload/retention'
|
||||
import * as config from '../src/internal/shared/config'
|
||||
import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
|
||||
import * as blobUpload from '../src/internal/upload/blob-upload'
|
||||
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
|
||||
import {noopLogs} from './common'
|
||||
import {FilesNotFoundError} from '../src/internal/shared/errors'
|
||||
|
||||
describe('upload-artifact', () => {
|
||||
beforeEach(() => {
|
||||
noopLogs()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('should successfully upload an artifact', () => {
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockReturnValue()
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
|
||||
.mockReturnValue([
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file1.txt',
|
||||
destinationPath: 'file1.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file2.txt',
|
||||
destinationPath: 'file2.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
|
||||
destinationPath: 'dir/file3.txt'
|
||||
}
|
||||
])
|
||||
|
||||
jest
|
||||
.spyOn(zip, 'createZipUploadStream')
|
||||
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
|
||||
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
|
||||
workflowRunBackendId: '1234',
|
||||
workflowJobRunBackendId: '5678'
|
||||
})
|
||||
jest
|
||||
.spyOn(retention, 'getExpiration')
|
||||
.mockReturnValue(Timestamp.fromDate(mockDate))
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: true,
|
||||
signedUploadUrl: 'https://signed-upload-url.com'
|
||||
})
|
||||
)
|
||||
jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
|
||||
Promise.resolve({
|
||||
uploadSize: 1234,
|
||||
sha256Hash: 'test-sha256-hash'
|
||||
})
|
||||
)
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
|
||||
.mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))
|
||||
|
||||
// ArtifactHttpClient mocks
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://test-url.com')
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
|
||||
return expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
|
||||
})
|
||||
|
||||
it('should throw an error if the root directory is invalid', () => {
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockImplementation(() => {
|
||||
throw new Error('Invalid root directory')
|
||||
})
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
|
||||
return expect(uploadResp).rejects.toThrow('Invalid root directory')
|
||||
})
|
||||
|
||||
it('should reject if there are no files to upload', () => {
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockReturnValue()
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
|
||||
.mockReturnValue([])
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
return expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
|
||||
})
|
||||
|
||||
it('should reject if no backend IDs are found', () => {
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockReturnValue()
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
|
||||
.mockReturnValue([
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file1.txt',
|
||||
destinationPath: 'file1.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file2.txt',
|
||||
destinationPath: 'file2.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
|
||||
destinationPath: 'dir/file3.txt'
|
||||
}
|
||||
])
|
||||
|
||||
jest
|
||||
.spyOn(zip, 'createZipUploadStream')
|
||||
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
|
||||
return expect(uploadResp).rejects.toThrow()
|
||||
})
|
||||
|
||||
it('should reject if the creation request fails', () => {
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockReturnValue()
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
|
||||
.mockReturnValue([
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file1.txt',
|
||||
destinationPath: 'file1.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file2.txt',
|
||||
destinationPath: 'file2.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
|
||||
destinationPath: 'dir/file3.txt'
|
||||
}
|
||||
])
|
||||
|
||||
jest
|
||||
.spyOn(zip, 'createZipUploadStream')
|
||||
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
|
||||
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
|
||||
workflowRunBackendId: '1234',
|
||||
workflowJobRunBackendId: '5678'
|
||||
})
|
||||
jest
|
||||
.spyOn(retention, 'getExpiration')
|
||||
.mockReturnValue(Timestamp.fromDate(mockDate))
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
|
||||
.mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
|
||||
|
||||
// ArtifactHttpClient mocks
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://test-url.com')
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
|
||||
return expect(uploadResp).rejects.toThrow()
|
||||
})
|
||||
|
||||
it('should reject if blob storage upload is unsuccessful', () => {
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockReturnValue()
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
|
||||
.mockReturnValue([
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file1.txt',
|
||||
destinationPath: 'file1.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file2.txt',
|
||||
destinationPath: 'file2.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
|
||||
destinationPath: 'dir/file3.txt'
|
||||
}
|
||||
])
|
||||
|
||||
jest
|
||||
.spyOn(zip, 'createZipUploadStream')
|
||||
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
|
||||
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
|
||||
workflowRunBackendId: '1234',
|
||||
workflowJobRunBackendId: '5678'
|
||||
})
|
||||
jest
|
||||
.spyOn(retention, 'getExpiration')
|
||||
.mockReturnValue(Timestamp.fromDate(mockDate))
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: true,
|
||||
signedUploadUrl: 'https://signed-upload-url.com'
|
||||
})
|
||||
)
|
||||
jest
|
||||
.spyOn(blobUpload, 'uploadZipToBlobStorage')
|
||||
.mockReturnValue(Promise.reject(new Error('boom')))
|
||||
|
||||
// ArtifactHttpClient mocks
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://test-url.com')
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
|
||||
return expect(uploadResp).rejects.toThrow()
|
||||
})
|
||||
|
||||
it('should reject if finalize artifact fails', () => {
|
||||
const mockDate = new Date('2020-01-01')
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'validateRootDirectory')
|
||||
.mockReturnValue()
|
||||
jest
|
||||
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
|
||||
.mockReturnValue([
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file1.txt',
|
||||
destinationPath: 'file1.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/file2.txt',
|
||||
destinationPath: 'file2.txt'
|
||||
},
|
||||
{
|
||||
sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
|
||||
destinationPath: 'dir/file3.txt'
|
||||
}
|
||||
])
|
||||
|
||||
jest
|
||||
.spyOn(zip, 'createZipUploadStream')
|
||||
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
|
||||
jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
|
||||
workflowRunBackendId: '1234',
|
||||
workflowJobRunBackendId: '5678'
|
||||
})
|
||||
jest
|
||||
.spyOn(retention, 'getExpiration')
|
||||
.mockReturnValue(Timestamp.fromDate(mockDate))
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: true,
|
||||
signedUploadUrl: 'https://signed-upload-url.com'
|
||||
})
|
||||
)
|
||||
jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
|
||||
Promise.resolve({
|
||||
uploadSize: 1234,
|
||||
sha256Hash: 'test-sha256-hash'
|
||||
})
|
||||
)
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
|
||||
.mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))
|
||||
|
||||
// ArtifactHttpClient mocks
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://test-url.com')
|
||||
|
||||
const uploadResp = uploadArtifact(
|
||||
'test-artifact',
|
||||
[
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
],
|
||||
'/home/user/files/plz-upload'
|
||||
)
|
||||
|
||||
return expect(uploadResp).rejects.toThrow()
|
||||
})
|
||||
})
|
|
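// A hedged sketch of the three-step flow the upload-artifact tests mock out: CreateArtifact
// yields a signed upload URL, the zip stream goes to blob storage, and FinalizeArtifact
// records the size and hash. All parameter and helper names here are illustrative.
async function uploadArtifactFlowSketch(
  createArtifact: () => Promise<{ok: boolean; signedUploadUrl: string}>,
  uploadZip: (url: string) => Promise<{uploadSize: number; sha256Hash: string}>,
  finalize: (size: number, hash: string) => Promise<{ok: boolean; artifactId: string}>
): Promise<{size: number; id: number}> {
  const created = await createArtifact()
  if (!created.ok) {
    throw new Error('CreateArtifact failed') // tests expect a rejection here
  }
  const blob = await uploadZip(created.signedUploadUrl)
  const finalized = await finalize(blob.uploadSize, blob.sha256Hash)
  if (!finalized.ok) {
    throw new Error('FinalizeArtifact failed') // tests expect a rejection here too
  }
  return {size: blob.uploadSize, id: Number(finalized.artifactId)}
}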
@ -1,10 +1,12 @@
|
|||
import * as io from '../../io/src/io'
|
||||
import * as path from 'path'
|
||||
import {promises as fs} from 'fs'
|
||||
import * as core from '@actions/core'
|
||||
import {getUploadSpecification} from '../src/internal/upload-specification'
|
||||
import {
|
||||
getUploadZipSpecification,
|
||||
validateRootDirectory
|
||||
} from '../src/internal/upload/upload-zip-specification'
|
||||
import {noopLogs} from './common'
|
||||
|
||||
const artifactName = 'my-artifact'
|
||||
const root = path.join(__dirname, '_temp', 'upload-specification')
|
||||
const goodItem1Path = path.join(
|
||||
root,
|
||||
|
@ -49,11 +51,7 @@ const artifactFilesToUpload = [
|
|||
|
||||
describe('Search', () => {
|
||||
beforeAll(async () => {
|
||||
// mock all output so that there is less noise when running tests
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
noopLogs()
|
||||
|
||||
// clear temp directory
|
||||
await io.rmRF(root)
|
||||
|
@ -125,31 +123,28 @@ describe('Search', () => {
|
|||
'upload-specification-invalid'
|
||||
)
|
||||
expect(() => {
|
||||
getUploadSpecification(
|
||||
artifactName,
|
||||
invalidRootDirectory,
|
||||
artifactFilesToUpload
|
||||
)
|
||||
}).toThrow(`Provided rootDirectory ${invalidRootDirectory} does not exist`)
|
||||
validateRootDirectory(invalidRootDirectory)
|
||||
}).toThrow(
|
||||
`The provided rootDirectory ${invalidRootDirectory} does not exist`
|
||||
)
|
||||
})
|
||||
|
||||
it('Upload Specification - Fail invalid rootDirectory', async () => {
expect(() => {
getUploadSpecification(artifactName, goodItem1Path, artifactFilesToUpload)
validateRootDirectory(goodItem1Path)
}).toThrow(
`Provided rootDirectory ${goodItem1Path} is not a valid directory`
`The provided rootDirectory ${goodItem1Path} is not a valid directory`
)
})

it('Upload Specification - File does not exist', async () => {
const fakeFilePath = path.join(
artifactName,
'folder-a',
'folder-b',
'non-existent-file.txt'
)
expect(() => {
getUploadSpecification(artifactName, root, [fakeFilePath])
getUploadZipSpecification([fakeFilePath], root)
}).toThrow(`File ${fakeFilePath} does not exist`)
})

@@ -162,21 +157,20 @@ describe('Search', () => {
goodItem5Path
]
expect(() => {
getUploadSpecification(artifactName, folderADirectory, artifactFiles)
getUploadZipSpecification(artifactFiles, folderADirectory)
}).toThrow(
`The rootDirectory: ${folderADirectory} is not a parent directory of the file: ${goodItem5Path}`
)
})

it('Upload Specification - Success', async () => {
const specifications = getUploadSpecification(
artifactName,
root,
artifactFilesToUpload
const specifications = getUploadZipSpecification(
artifactFilesToUpload,
root
)
expect(specifications.length).toEqual(7)

const absolutePaths = specifications.map(item => item.absoluteFilePath)
const absolutePaths = specifications.map(item => item.sourcePath)
expect(absolutePaths).toContain(goodItem1Path)
expect(absolutePaths).toContain(goodItem2Path)
expect(absolutePaths).toContain(goodItem3Path)

@@ -186,45 +180,38 @@ describe('Search', () => {
expect(absolutePaths).toContain(amazingFileInFolderHPath)

for (const specification of specifications) {
if (specification.absoluteFilePath === goodItem1Path) {
expect(specification.uploadFilePath).toEqual(
if (specification.sourcePath === goodItem1Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
)
} else if (specification.sourcePath === goodItem2Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item2.txt')
)
} else if (specification.sourcePath === goodItem3Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item3.txt')
)
} else if (specification.sourcePath === goodItem4Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item4.txt')
)
} else if (specification.sourcePath === goodItem5Path) {
expect(specification.destinationPath).toEqual(
path.join('/good-item5.txt')
)
} else if (specification.sourcePath === extraFileInFolderCPath) {
expect(specification.destinationPath).toEqual(
path.join(
artifactName,
'folder-a',
'folder-b',
'folder-c',
'good-item1.txt'
)
)
} else if (specification.absoluteFilePath === goodItem2Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item2.txt')
)
} else if (specification.absoluteFilePath === goodItem3Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item3.txt')
)
} else if (specification.absoluteFilePath === goodItem4Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item4.txt')
)
} else if (specification.absoluteFilePath === goodItem5Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'good-item5.txt')
)
} else if (specification.absoluteFilePath === extraFileInFolderCPath) {
expect(specification.uploadFilePath).toEqual(
path.join(
artifactName,
'folder-a',
'/folder-a',
'folder-b',
'folder-c',
'extra-file-in-folder-c.txt'
)
)
} else if (specification.absoluteFilePath === amazingFileInFolderHPath) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-h', 'amazing-item.txt')
} else if (specification.sourcePath === amazingFileInFolderHPath) {
expect(specification.destinationPath).toEqual(
path.join('/folder-h', 'amazing-item.txt')
)
} else {
throw new Error(

@@ -236,14 +223,13 @@ describe('Search', () => {

it('Upload Specification - Success with extra slash', async () => {
const rootWithSlash = `${root}/`
const specifications = getUploadSpecification(
artifactName,
rootWithSlash,
artifactFilesToUpload
const specifications = getUploadZipSpecification(
artifactFilesToUpload,
rootWithSlash
)
expect(specifications.length).toEqual(7)

const absolutePaths = specifications.map(item => item.absoluteFilePath)
const absolutePaths = specifications.map(item => item.sourcePath)
expect(absolutePaths).toContain(goodItem1Path)
expect(absolutePaths).toContain(goodItem2Path)
expect(absolutePaths).toContain(goodItem3Path)

@@ -253,45 +239,38 @@ describe('Search', () => {
expect(absolutePaths).toContain(amazingFileInFolderHPath)

for (const specification of specifications) {
if (specification.absoluteFilePath === goodItem1Path) {
expect(specification.uploadFilePath).toEqual(
if (specification.sourcePath === goodItem1Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
)
} else if (specification.sourcePath === goodItem2Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item2.txt')
)
} else if (specification.sourcePath === goodItem3Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item3.txt')
)
} else if (specification.sourcePath === goodItem4Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item4.txt')
)
} else if (specification.sourcePath === goodItem5Path) {
expect(specification.destinationPath).toEqual(
path.join('/good-item5.txt')
)
} else if (specification.sourcePath === extraFileInFolderCPath) {
expect(specification.destinationPath).toEqual(
path.join(
artifactName,
'folder-a',
'folder-b',
'folder-c',
'good-item1.txt'
)
)
} else if (specification.absoluteFilePath === goodItem2Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item2.txt')
)
} else if (specification.absoluteFilePath === goodItem3Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item3.txt')
)
} else if (specification.absoluteFilePath === goodItem4Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item4.txt')
)
} else if (specification.absoluteFilePath === goodItem5Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'good-item5.txt')
)
} else if (specification.absoluteFilePath === extraFileInFolderCPath) {
expect(specification.uploadFilePath).toEqual(
path.join(
artifactName,
'folder-a',
'/folder-a',
'folder-b',
'folder-c',
'extra-file-in-folder-c.txt'
)
)
} else if (specification.absoluteFilePath === amazingFileInFolderHPath) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-h', 'amazing-item.txt')
} else if (specification.sourcePath === amazingFileInFolderHPath) {
expect(specification.destinationPath).toEqual(
path.join('/folder-h', 'amazing-item.txt')
)
} else {
throw new Error(

@@ -301,47 +280,23 @@ describe('Search', () => {
}
})

it('Upload Specification - Directories should not be included', async () => {
it('Upload Specification - Empty Directories are included', async () => {
const folderEPath = path.join(root, 'folder-a', 'folder-b', 'folder-e')
const filesWithDirectory = [
goodItem1Path,
goodItem4Path,
folderEPath,
badItem3Path
]
const specifications = getUploadSpecification(
artifactName,
root,
filesWithDirectory
)
expect(specifications.length).toEqual(3)
const absolutePaths = specifications.map(item => item.absoluteFilePath)
const filesWithDirectory = [goodItem1Path, folderEPath]
const specifications = getUploadZipSpecification(filesWithDirectory, root)
expect(specifications.length).toEqual(2)
const absolutePaths = specifications.map(item => item.sourcePath)
expect(absolutePaths).toContain(goodItem1Path)
expect(absolutePaths).toContain(goodItem4Path)
expect(absolutePaths).toContain(badItem3Path)
expect(absolutePaths).toContain(null)

for (const specification of specifications) {
if (specification.absoluteFilePath === goodItem1Path) {
expect(specification.uploadFilePath).toEqual(
path.join(
artifactName,
'folder-a',
'folder-b',
'folder-c',
'good-item1.txt'
)
if (specification.sourcePath === goodItem1Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
)
} else if (specification.absoluteFilePath === goodItem2Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item2.txt')
)
} else if (specification.absoluteFilePath === goodItem4Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-d', 'good-item4.txt')
)
} else if (specification.absoluteFilePath === badItem3Path) {
expect(specification.uploadFilePath).toEqual(
path.join(artifactName, 'folder-f', 'bad-item3.txt')
} else if (specification.sourcePath === null) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-e')
)
} else {
throw new Error(
@@ -1,494 +0,0 @@
import * as http from 'http'
import * as io from '../../io/src/io'
import * as net from 'net'
import * as path from 'path'
import {UploadHttpClient} from '../src/internal/upload-http-client'
import * as core from '@actions/core'
import {promises as fs} from 'fs'
import {getRuntimeUrl} from '../src/internal/config-variables'
import {HttpClient, HttpClientResponse} from '@actions/http-client'
import {
ArtifactResponse,
PatchArtifactSizeSuccessResponse
} from '../src/internal/contracts'
import {UploadSpecification} from '../src/internal/upload-specification'
import {getArtifactUrl} from '../src/internal/utils'
import {UploadOptions} from '../src/internal/upload-options'

const root = path.join(__dirname, '_temp', 'artifact-upload')
const file1Path = path.join(root, 'file1.txt')
const file2Path = path.join(root, 'file2.txt')
const file3Path = path.join(root, 'folder1', 'file3.txt')
const file4Path = path.join(root, 'folder1', 'file4.txt')
const file5Path = path.join(root, 'folder1', 'folder2', 'folder3', 'file5.txt')

let file1Size = 0
let file2Size = 0
let file3Size = 0
let file4Size = 0
let file5Size = 0

jest.mock('../src/internal/config-variables')
jest.mock('@actions/http-client')

describe('Upload Tests', () => {
beforeAll(async () => {
// mock all output so that there is less noise when running tests
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
jest.spyOn(core, 'error').mockImplementation(() => {})

// setup mocking for calls that got through the HttpClient
setupHttpClientMock()

// clear temp directory and create files that will be "uploaded"
await io.rmRF(root)
await fs.mkdir(path.join(root, 'folder1', 'folder2', 'folder3'), {
recursive: true
})
await fs.writeFile(file1Path, 'this is file 1')
await fs.writeFile(file2Path, 'this is file 2')
await fs.writeFile(file3Path, 'this is file 3')
await fs.writeFile(file4Path, 'this is file 4')
await fs.writeFile(file5Path, 'this is file 5')
/*
Directory structure for files that get created:
root/
file1.txt
file2.txt
folder1/
file3.txt
file4.txt
folder2/
folder3/
file5.txt
*/

file1Size = (await fs.stat(file1Path)).size
file2Size = (await fs.stat(file2Path)).size
file3Size = (await fs.stat(file3Path)).size
file4Size = (await fs.stat(file4Path)).size
file5Size = (await fs.stat(file5Path)).size
})

/**
* Artifact Creation Tests
*/
it('Create Artifact - Success', async () => {
const artifactName = 'valid-artifact-name'
const uploadHttpClient = new UploadHttpClient()
const response = await uploadHttpClient.createArtifactInFileContainer(
artifactName
)
expect(response.containerId).toEqual('13')
expect(response.size).toEqual(-1)
expect(response.signedContent).toEqual('false')
expect(response.fileContainerResourceUrl).toEqual(
`${getRuntimeUrl()}_apis/resources/Containers/13`
)
expect(response.type).toEqual('actions_storage')
expect(response.name).toEqual(artifactName)
expect(response.url).toEqual(
`${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${artifactName}`
)
})

it('Create Artifact - Failure', async () => {
const artifactName = 'invalid-artifact-name'
const uploadHttpClient = new UploadHttpClient()
expect(
uploadHttpClient.createArtifactInFileContainer(artifactName)
).rejects.toEqual(
new Error(
`Create Artifact Container failed: The artifact name invalid-artifact-name is not valid. Request URL ${getArtifactUrl()}`
)
)
})

it('Create Artifact - Retention Less Than Min Value Error', async () => {
const artifactName = 'valid-artifact-name'
const options: UploadOptions = {
retentionDays: -1
}
const uploadHttpClient = new UploadHttpClient()
expect(
uploadHttpClient.createArtifactInFileContainer(artifactName, options)
).rejects.toEqual(new Error('Invalid retention, minimum value is 1.'))
})

it('Create Artifact - Storage Quota Error', async () => {
const artifactName = 'storage-quota-hit'
const uploadHttpClient = new UploadHttpClient()
expect(
uploadHttpClient.createArtifactInFileContainer(artifactName)
).rejects.toEqual(
new Error(
'Create Artifact Container failed: Artifact storage quota has been hit. Unable to upload any new artifacts'
)
)
})

/**
* Artifact Upload Tests
*/
it('Upload Artifact - Success', async () => {
/**
* Normally search.findFilesToUpload() would be used for providing information about what to upload. These tests however
* focuses solely on the upload APIs so searchResult[] will be hard-coded
*/
const artifactName = 'successful-artifact'
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: file1Path,
uploadFilePath: `${artifactName}/file1.txt`
},
{
absoluteFilePath: file2Path,
uploadFilePath: `${artifactName}/file2.txt`
},
{
absoluteFilePath: file3Path,
uploadFilePath: `${artifactName}/folder1/file3.txt`
},
{
absoluteFilePath: file4Path,
uploadFilePath: `${artifactName}/folder1/file4.txt`
},
{
absoluteFilePath: file5Path,
uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
}
]

const expectedTotalSize =
file1Size + file2Size + file3Size + file4Size + file5Size
const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification
)
expect(uploadResult.failedItems.length).toEqual(0)
expect(uploadResult.uploadSize).toEqual(expectedTotalSize)
})

it('Upload Artifact - Failed Single File Upload', async () => {
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: file1Path,
uploadFilePath: `this-file-upload-will-fail`
}
]

const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification
)
expect(uploadResult.failedItems.length).toEqual(1)
expect(uploadResult.uploadSize).toEqual(0)
})

it('Upload Artifact - Partial Upload Continue On Error', async () => {
const artifactName = 'partial-artifact'
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: file1Path,
uploadFilePath: `${artifactName}/file1.txt`
},
{
absoluteFilePath: file2Path,
uploadFilePath: `${artifactName}/file2.txt`
},
{
absoluteFilePath: file3Path,
uploadFilePath: `${artifactName}/folder1/file3.txt`
},
{
absoluteFilePath: file4Path,
uploadFilePath: `this-file-upload-will-fail`
},
{
absoluteFilePath: file5Path,
uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
}
]

const expectedPartialSize = file1Size + file2Size + file4Size + file5Size
const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification,
{continueOnError: true}
)
expect(uploadResult.failedItems.length).toEqual(1)
expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
})

it('Upload Artifact - Partial Upload Fail Fast', async () => {
const artifactName = 'partial-artifact'
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: file1Path,
uploadFilePath: `${artifactName}/file1.txt`
},
{
absoluteFilePath: file2Path,
uploadFilePath: `${artifactName}/file2.txt`
},
{
absoluteFilePath: file3Path,
uploadFilePath: `${artifactName}/folder1/file3.txt`
},
{
absoluteFilePath: file4Path,
uploadFilePath: `this-file-upload-will-fail`
},
{
absoluteFilePath: file5Path,
uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
}
]

const expectedPartialSize = file1Size + file2Size + file3Size
const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification,
{continueOnError: false}
)
expect(uploadResult.failedItems.length).toEqual(2)
expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
})

it('Upload Artifact - Failed upload with no options', async () => {
const artifactName = 'partial-artifact'
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: file1Path,
uploadFilePath: `${artifactName}/file1.txt`
},
{
absoluteFilePath: file2Path,
uploadFilePath: `${artifactName}/file2.txt`
},
{
absoluteFilePath: file3Path,
uploadFilePath: `${artifactName}/folder1/file3.txt`
},
{
absoluteFilePath: file4Path,
uploadFilePath: `this-file-upload-will-fail`
},
{
absoluteFilePath: file5Path,
uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
}
]

const expectedPartialSize = file1Size + file2Size + file3Size + file5Size
const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification
)
expect(uploadResult.failedItems.length).toEqual(1)
expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
})

it('Upload Artifact - Failed upload with empty options', async () => {
const artifactName = 'partial-artifact'
const uploadSpecification: UploadSpecification[] = [
{
absoluteFilePath: file1Path,
uploadFilePath: `${artifactName}/file1.txt`
},
{
absoluteFilePath: file2Path,
uploadFilePath: `${artifactName}/file2.txt`
},
{
absoluteFilePath: file3Path,
uploadFilePath: `${artifactName}/folder1/file3.txt`
},
{
absoluteFilePath: file4Path,
uploadFilePath: `this-file-upload-will-fail`
},
{
absoluteFilePath: file5Path,
uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
}
]

const expectedPartialSize = file1Size + file2Size + file3Size + file5Size
const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
const uploadHttpClient = new UploadHttpClient()
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
uploadUrl,
uploadSpecification,
{}
)
expect(uploadResult.failedItems.length).toEqual(1)
expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
})

/**
* Artifact Association Tests
*/
it('Associate Artifact - Success', async () => {
const uploadHttpClient = new UploadHttpClient()
expect(async () => {
uploadHttpClient.patchArtifactSize(130, 'my-artifact')
}).not.toThrow()
})

it('Associate Artifact - Not Found', async () => {
const uploadHttpClient = new UploadHttpClient()
expect(
uploadHttpClient.patchArtifactSize(100, 'non-existent-artifact')
).rejects.toThrow(
'An Artifact with the name non-existent-artifact was not found'
)
})

it('Associate Artifact - Error', async () => {
const uploadHttpClient = new UploadHttpClient()
expect(
uploadHttpClient.patchArtifactSize(-2, 'my-artifact')
).rejects.toThrow(
'Finalize artifact upload failed: Artifact service responded with 400'
)
})

/**
* Helpers used to setup mocking for the HttpClient
*/
async function emptyMockReadBody(): Promise<string> {
return new Promise(resolve => {
resolve()
})
}

function setupHttpClientMock(): void {
/**
* Mocks Post calls that are used during Artifact Creation tests
*
* Simulates success and non-success status codes depending on the artifact name along with an appropriate
* payload that represents an expected response
*/
jest
.spyOn(HttpClient.prototype, 'post')
.mockImplementation(async (requestdata, data) => {
// parse the input data and use the provided artifact name as part of the response
const inputData = JSON.parse(data)
const mockMessage = new http.IncomingMessage(new net.Socket())
let mockReadBody = emptyMockReadBody

if (inputData.Name === 'invalid-artifact-name') {
mockMessage.statusCode = 400
} else if (inputData.Name === 'storage-quota-hit') {
mockMessage.statusCode = 403
} else {
mockMessage.statusCode = 201
const response: ArtifactResponse = {
containerId: '13',
size: -1,
signedContent: 'false',
fileContainerResourceUrl: `${getRuntimeUrl()}_apis/resources/Containers/13`,
type: 'actions_storage',
name: inputData.Name,
url: `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${
inputData.Name
}`
}
const returnData: string = JSON.stringify(response, null, 2)
mockReadBody = async function(): Promise<string> {
return new Promise(resolve => {
resolve(returnData)
})
}
}
return new Promise<HttpClientResponse>(resolve => {
resolve({
message: mockMessage,
readBody: mockReadBody
})
})
})

/**
* Mocks SendStream calls that are made during Artifact Upload tests
*
* A 500 response is used to simulate a failed upload stream. The uploadUrl can be set to
* include 'fail' to specify that the upload should fail
*/
jest
.spyOn(HttpClient.prototype, 'sendStream')
.mockImplementation(async (verb, requestUrl) => {
const mockMessage = new http.IncomingMessage(new net.Socket())
mockMessage.statusCode = 200
if (requestUrl.includes('fail')) {
mockMessage.statusCode = 500
}

return new Promise<HttpClientResponse>(resolve => {
resolve({
message: mockMessage,
readBody: emptyMockReadBody
})
})
})

/**
* Mocks Patch calls that are made during Artifact Association tests
*
* Simulates success and non-success status codes depending on the input size along with an appropriate
* payload that represents an expected response
*/
jest
.spyOn(HttpClient.prototype, 'patch')
.mockImplementation(async (requestdata, data) => {
const inputData = JSON.parse(data)
const mockMessage = new http.IncomingMessage(new net.Socket())

// Get the name from the end of requestdata. Will be something like https://www.example.com/_apis/pipelines/workflows/15/artifacts?api-version=6.0-preview&artifactName=my-artifact
const artifactName = requestdata.split('=')[2]
let mockReadBody = emptyMockReadBody
if (inputData.Size < 1) {
mockMessage.statusCode = 400
} else if (artifactName === 'non-existent-artifact') {
mockMessage.statusCode = 404
} else {
mockMessage.statusCode = 200
const response: PatchArtifactSizeSuccessResponse = {
containerId: 13,
size: inputData.Size,
signedContent: 'false',
type: 'actions_storage',
name: artifactName,
url: `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${artifactName}`,
uploadUrl: `${getRuntimeUrl()}_apis/resources/Containers/13`
}
const returnData: string = JSON.stringify(response, null, 2)
mockReadBody = async function(): Promise<string> {
return new Promise(resolve => {
resolve(returnData)
})
}
}
return new Promise<HttpClientResponse>(resolve => {
resolve({
message: mockMessage,
readBody: mockReadBody
})
})
})
}
})
@@ -1,282 +1,61 @@
import * as fs from 'fs'
import * as io from '../../io/src/io'
import * as path from 'path'
import * as utils from '../src/internal/utils'
import * as core from '@actions/core'
import {HttpCodes} from '@actions/http-client'
import {
getRuntimeUrl,
getWorkFlowRunId,
getInitialRetryIntervalInMilliseconds,
getRetryMultiplier
} from '../src/internal/config-variables'
import * as config from '../src/internal/shared/config'
import * as util from '../src/internal/shared/util'

jest.mock('../src/internal/config-variables')
export const testRuntimeToken =
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'

describe('Utils', () => {
beforeAll(() => {
// mock all output so that there is less noise when running tests
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
describe('get-backend-ids-from-token', () => {
it('should return backend ids when the token is valid', () => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(testRuntimeToken)

const backendIds = util.getBackendIdsFromToken()
expect(backendIds.workflowRunBackendId).toBe(
'ce7f54c7-61c7-4aae-887f-30da475f5f1a'
)
expect(backendIds.workflowJobRunBackendId).toBe(
'ca395085-040a-526b-2ce8-bdc85f692774'
)
})

it('Check exponential retry range', () => {
// No retries should return the initial retry interval
const retryWaitTime0 = utils.getExponentialRetryTimeInMilliseconds(0)
expect(retryWaitTime0).toEqual(getInitialRetryIntervalInMilliseconds())

const testMinMaxRange = (retryCount: number): void => {
const retryWaitTime = utils.getExponentialRetryTimeInMilliseconds(
retryCount
it("should throw an error when the token doesn't have the right scope", () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCIsImlhdCI6MTUxNjIzOTAyMn0.K0IEoULZteGevF38G94xiaA8zcZ5UlKWfGfqE6q3dhw'
)
const minRange =
getInitialRetryIntervalInMilliseconds() *
getRetryMultiplier() *
retryCount
const maxRange = minRange * getRetryMultiplier()

expect(retryWaitTime).toBeGreaterThanOrEqual(minRange)
expect(retryWaitTime).toBeLessThan(maxRange)
}

for (let i = 1; i < 10; i++) {
testMinMaxRange(i)
}
})

it('Check Artifact Name for any invalid characters', () => {
const invalidNames = [
'my\\artifact',
'my/artifact',
'my"artifact',
'my:artifact',
'my<artifact',
'my>artifact',
'my|artifact',
'my*artifact',
'my?artifact',
''
]
for (const invalidName of invalidNames) {
expect(() => {
utils.checkArtifactName(invalidName)
}).toThrow()
}

const validNames = [
'my-normal-artifact',
'myNormalArtifact',
'm¥ñðrmålÄr†ï£å¢†'
]
for (const validName of validNames) {
expect(() => {
utils.checkArtifactName(validName)
}).not.toThrow()
}
})

it('Check Artifact File Path for any invalid characters', () => {
const invalidNames = [
'some/invalid"artifact/path',
'some/invalid:artifact/path',
'some/invalid<artifact/path',
'some/invalid>artifact/path',
'some/invalid|artifact/path',
'some/invalid*artifact/path',
'some/invalid?artifact/path',
''
]
for (const invalidName of invalidNames) {
expect(() => {
utils.checkArtifactFilePath(invalidName)
}).toThrow()
}

const validNames = [
'my/perfectly-normal/artifact-path',
'my/perfectly\\Normal/Artifact-path',
'm¥/ñðrmål/Är†ï£å¢†'
]
for (const validName of validNames) {
expect(() => {
utils.checkArtifactFilePath(validName)
}).not.toThrow()
}
})

it('Test negative artifact retention throws', () => {
expect(() => {
utils.getProperRetention(-1, undefined)
}).toThrow()
})

it('Test no setting specified takes artifact retention input', () => {
expect(utils.getProperRetention(180, undefined)).toEqual(180)
})

it('Test artifact retention must conform to max allowed', () => {
expect(utils.getProperRetention(180, '45')).toEqual(45)
})

it('Test constructing artifact URL', () => {
const runtimeUrl = getRuntimeUrl()
const runId = getWorkFlowRunId()
const artifactUrl = utils.getArtifactUrl()
expect(artifactUrl).toEqual(
`${runtimeUrl}_apis/pipelines/workflows/${runId}/artifacts?api-version=${utils.getApiVersion()}`
expect(util.getBackendIdsFromToken).toThrowError(
'Failed to get backend IDs: The provided JWT token is invalid'
)
})

it('Test constructing upload headers with all optional parameters', () => {
const contentType = 'application/octet-stream'
const size = 24
const uncompressedLength = 100
const range = 'bytes 0-199/200'
const headers = utils.getUploadHeaders(
contentType,
true,
true,
uncompressedLength,
size,
range
)
expect(Object.keys(headers).length).toEqual(8)
expect(headers['Accept']).toEqual(
`application/json;api-version=${utils.getApiVersion()}`
)
expect(headers['Content-Type']).toEqual(contentType)
expect(headers['Connection']).toEqual('Keep-Alive')
expect(headers['Keep-Alive']).toEqual('10')
expect(headers['Content-Encoding']).toEqual('gzip')
expect(headers['x-tfs-filelength']).toEqual(uncompressedLength)
expect(headers['Content-Length']).toEqual(size)
expect(headers['Content-Range']).toEqual(range)
})
it('should throw an error when the token has a malformed scope', () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhIiwiaWF0IjoxNTE2MjM5MDIyfQ.7D0_LRfRFRZFImHQ7GxH2S6ZyFjjZ5U0ujjGCfle1XE'
)

it('Test constructing upload headers with only required parameter', () => {
const headers = utils.getUploadHeaders('application/octet-stream')
expect(Object.keys(headers).length).toEqual(2)
expect(headers['Accept']).toEqual(
`application/json;api-version=${utils.getApiVersion()}`
)
expect(headers['Content-Type']).toEqual('application/octet-stream')
})

it('Test constructing download headers with all optional parameters', () => {
const contentType = 'application/json'
const headers = utils.getDownloadHeaders(contentType, true, true)
expect(Object.keys(headers).length).toEqual(5)
expect(headers['Content-Type']).toEqual(contentType)
expect(headers['Connection']).toEqual('Keep-Alive')
expect(headers['Keep-Alive']).toEqual('10')
expect(headers['Accept-Encoding']).toEqual('gzip')
expect(headers['Accept']).toEqual(
`application/octet-stream;api-version=${utils.getApiVersion()}`
expect(util.getBackendIdsFromToken).toThrowError(
'Failed to get backend IDs: The provided JWT token is invalid'
)
})

it('Test constructing download headers with only required parameter', () => {
const headers = utils.getDownloadHeaders('application/octet-stream')
expect(Object.keys(headers).length).toEqual(2)
expect(headers['Content-Type']).toEqual('application/octet-stream')
// check for default accept type
expect(headers['Accept']).toEqual(
`application/json;api-version=${utils.getApiVersion()}`
)
it('should throw an error when the token is in an invalid format', () => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('token')

expect(util.getBackendIdsFromToken).toThrowError('Invalid token specified')
})

it('Test Success Status Code', () => {
expect(utils.isSuccessStatusCode(HttpCodes.OK)).toEqual(true)
expect(utils.isSuccessStatusCode(201)).toEqual(true)
expect(utils.isSuccessStatusCode(299)).toEqual(true)
expect(utils.isSuccessStatusCode(HttpCodes.NotFound)).toEqual(false)
expect(utils.isSuccessStatusCode(HttpCodes.BadGateway)).toEqual(false)
expect(utils.isSuccessStatusCode(HttpCodes.Forbidden)).toEqual(false)
})
it("should throw an error when the token doesn't have the right field", () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'
)

it('Test Retry Status Code', () => {
expect(utils.isRetryableStatusCode(HttpCodes.BadGateway)).toEqual(true)
expect(utils.isRetryableStatusCode(HttpCodes.ServiceUnavailable)).toEqual(
true
expect(util.getBackendIdsFromToken).toThrowError(
'Failed to get backend IDs: The provided JWT token is invalid'
)
expect(utils.isRetryableStatusCode(HttpCodes.GatewayTimeout)).toEqual(true)
expect(utils.isRetryableStatusCode(HttpCodes.TooManyRequests)).toEqual(true)
expect(utils.isRetryableStatusCode(HttpCodes.OK)).toEqual(false)
expect(utils.isRetryableStatusCode(HttpCodes.NotFound)).toEqual(false)
expect(utils.isRetryableStatusCode(HttpCodes.Forbidden)).toEqual(false)
expect(utils.isRetryableStatusCode(413)).toEqual(true) // Payload Too Large
})

it('Test Throttled Status Code', () => {
expect(utils.isThrottledStatusCode(HttpCodes.TooManyRequests)).toEqual(true)
expect(utils.isThrottledStatusCode(HttpCodes.InternalServerError)).toEqual(
false
)
expect(utils.isThrottledStatusCode(HttpCodes.BadGateway)).toEqual(false)
expect(utils.isThrottledStatusCode(HttpCodes.ServiceUnavailable)).toEqual(
false
)
})

it('Test Forbidden Status Code', () => {
expect(utils.isForbiddenStatusCode(HttpCodes.Forbidden)).toEqual(true)
expect(utils.isForbiddenStatusCode(HttpCodes.InternalServerError)).toEqual(
false
)
expect(utils.isForbiddenStatusCode(HttpCodes.TooManyRequests)).toEqual(
false
)
expect(utils.isForbiddenStatusCode(HttpCodes.OK)).toEqual(false)
})

it('Test Creating Artifact Directories', async () => {
const root = path.join(__dirname, '_temp', 'artifact-download')
// remove directory before starting
await io.rmRF(root)

const directory1 = path.join(root, 'folder2', 'folder3')
const directory2 = path.join(directory1, 'folder1')

// Initially should not exist
await expect(fs.promises.access(directory1)).rejects.not.toBeUndefined()
await expect(fs.promises.access(directory2)).rejects.not.toBeUndefined()
const directoryStructure = [directory1, directory2]
await utils.createDirectoriesForArtifact(directoryStructure)
// directories should now be created
await expect(fs.promises.access(directory1)).resolves.toEqual(undefined)
await expect(fs.promises.access(directory2)).resolves.toEqual(undefined)
})

it('Test Creating Empty Files', async () => {
const root = path.join(__dirname, '_temp', 'empty-files')
await io.rmRF(root)

const emptyFile1 = path.join(root, 'emptyFile1')
const directoryToCreate = path.join(root, 'folder1')
const emptyFile2 = path.join(directoryToCreate, 'emptyFile2')

// empty files should only be created after the directory structure is fully setup
// ensure they are first created by using the createDirectoriesForArtifact method
const directoryStructure = [root, directoryToCreate]
await utils.createDirectoriesForArtifact(directoryStructure)
await expect(fs.promises.access(root)).resolves.toEqual(undefined)
await expect(fs.promises.access(directoryToCreate)).resolves.toEqual(
undefined
)

await expect(fs.promises.access(emptyFile1)).rejects.not.toBeUndefined()
await expect(fs.promises.access(emptyFile2)).rejects.not.toBeUndefined()

const emptyFilesToCreate = [emptyFile1, emptyFile2]
await utils.createEmptyFilesForArtifact(emptyFilesToCreate)

await expect(fs.promises.access(emptyFile1)).resolves.toEqual(undefined)
const size1 = (await fs.promises.stat(emptyFile1)).size
expect(size1).toEqual(0)
await expect(fs.promises.access(emptyFile2)).resolves.toEqual(undefined)
const size2 = (await fs.promises.stat(emptyFile2)).size
expect(size2).toEqual(0)
})
})
@@ -1,53 +0,0 @@
# Additional Information

Extra information
- [Non-Supported Characters](#Non-Supported-Characters)
- [Permission loss](#Permission-Loss)
- [Considerations](#Considerations)
- [Compression](#Is-my-artifact-compressed)

## Non-Supported Characters

When uploading an artifact, the inputted `name` parameter along with the files specified in `files` cannot contain any of the following characters. If any are present, the server will reject them and the upload will fail. These characters are not allowed due to limitations and restrictions with certain file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an individual filesystem/platform will not be supported on all filesystems/platforms.

- "
- :
- <
- \>
- |
- \*
- ?

In addition to the aforementioned characters, the inputted `name` also cannot include the following
- \
- /


## Permission Loss

File permissions are not maintained between uploaded and downloaded artifacts. If file permissions need to be maintained (such as for an executable), consider archiving all of the files using something like `tar` and then uploading the single archive. After downloading the artifact, you can `un-tar` the individual file and permissions will be preserved.

```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const files = [
'/home/user/files/plz-upload/my-archive.tgz',
]
const rootDirectory = '/home/user/files/plz-upload'
const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory)
```

## Considerations

During upload, each file is uploaded concurrently in 4MB chunks using a separate HTTPS connection per file. Chunked uploads are used so that in the event of a failure (which is entirely possible because the internet is not perfect), the upload can be retried. If there is an error, a retry will be attempted after a certain period of time.

Uploading will generally be faster if there are fewer, larger files than if there are lots of smaller files. Depending on the types and quantities of files being uploaded, it might be beneficial to separately compress and archive everything into a single archive (using something like `tar` or `zip`) before starting an artifact upload to speed things up.

## Is my artifact compressed?

GZip is used internally to compress individual files before starting an upload. Compression helps reduce the total amount of data that must be uploaded and stored while helping to speed up uploads (this performance benefit is especially significant on self-hosted runners). If GZip does not reduce the size of the file that is being uploaded, the original file is uploaded as-is.

Compression using GZip also helps speed up artifact download as part of a workflow. Header information is used to determine if an individual file was uploaded using GZip and, if necessary, decompression is used.

When downloading an artifact from the GitHub UI (this differs from downloading an artifact during a workflow), a single Zip file is dynamically created that contains all of the files uploaded as part of an artifact. Any files that were uploaded using GZip will be decompressed on the server before being added to the Zip file with the remaining files.
@@ -0,0 +1,62 @@
# Frequently Asked Questions

- [Frequently Asked Questions](#frequently-asked-questions)
  - [Supported Characters](#supported-characters)
  - [Compression? ZIP? How is my artifact stored?](#compression-zip-how-is-my-artifact-stored)
  - [Which versions of the artifacts packages are compatible?](#which-versions-of-the-artifacts-packages-are-compatible)
  - [How long will my artifact be available?](#how-long-will-my-artifact-be-available)

## Supported Characters

When uploading an artifact, the inputted `name` parameter along with the files specified in `files` cannot contain any of the following characters. If they are present in `name` or `files`, the Artifact will be rejected by the server and the upload will fail. These characters are not allowed due to limitations and restrictions with certain file systems such as NTFS. To maintain platform-agnostic behavior, characters that are not supported by an individual filesystem/platform will not be supported on all filesystems/platforms.

- "
- :
- <
- \>
- |
- \*
- ?

In addition to the aforementioned characters, the inputted `name` also cannot include the following
- \
- /

## Compression? ZIP? How is my artifact stored?

When creating an Artifact, the files are dynamically compressed and streamed into a ZIP archive. Since they are stored in a ZIP, they can be compressed by Zlib at varying levels.

The compression level can range from 0 to 9:

- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression

Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
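A minimal sketch of setting the compression level, assuming the v2 `DefaultArtifactClient` and its `compressionLevel` upload option; the artifact name and file paths below are hypothetical:

```typescript
// Hypothetical paths and names; compressionLevel is an UploadArtifactOptions field
import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

async function run(): Promise<void> {
  // compressionLevel: 0 skips compression, which is usually fastest for
  // files that Zlib cannot shrink much anyway (videos, archives, binaries)
  const {id, size} = await artifact.uploadArtifact(
    'my-large-artifact',
    ['/tmp/build/output.mp4'],
    '/tmp/build',
    {compressionLevel: 0}
  )
  console.log(`Created artifact ${id} (${size} bytes)`)
}

run()
```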
## Which versions of the artifacts packages are compatible?
[actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact) leverage the [GitHub Actions toolkit](https://github.com/actions/toolkit) and are typically used together to upload and download artifacts in your workflows.

| upload-artifact | download-artifact | toolkit |
|---|---|---|
| v4 | v4 | v2 |
| < v3 | < v3 | < v1 |

Use matching versions of `actions/upload-artifact` and `actions/download-artifact` to ensure compatibility.

In your GitHub Actions workflow YAML file, you specify the version of the actions you want to use. For example:

```yaml
uses: actions/upload-artifact@v4
# ...
uses: actions/download-artifact@v4
# ...
```

**Release Notes:**
Check the release notes for each repository to see if there are any specific notes about compatibility or changes in behavior.

## How long will my artifact be available?
The default retention period is **90 days**. For more information, visit: https://github.com/actions/upload-artifact?tab=readme-ov-file#retention-period
@@ -0,0 +1,43 @@
@actions/artifact

# @actions/artifact

## Table of contents

### Classes

- [ArtifactNotFoundError](classes/ArtifactNotFoundError.md)
- [DefaultArtifactClient](classes/DefaultArtifactClient.md)
- [FilesNotFoundError](classes/FilesNotFoundError.md)
- [GHESNotSupportedError](classes/GHESNotSupportedError.md)
- [InvalidResponseError](classes/InvalidResponseError.md)
- [NetworkError](classes/NetworkError.md)
- [UsageError](classes/UsageError.md)

### Interfaces

- [Artifact](interfaces/Artifact.md)
- [ArtifactClient](interfaces/ArtifactClient.md)
- [DeleteArtifactResponse](interfaces/DeleteArtifactResponse.md)
- [DownloadArtifactOptions](interfaces/DownloadArtifactOptions.md)
- [DownloadArtifactResponse](interfaces/DownloadArtifactResponse.md)
- [FindOptions](interfaces/FindOptions.md)
- [GetArtifactResponse](interfaces/GetArtifactResponse.md)
- [ListArtifactsOptions](interfaces/ListArtifactsOptions.md)
- [ListArtifactsResponse](interfaces/ListArtifactsResponse.md)
- [UploadArtifactOptions](interfaces/UploadArtifactOptions.md)
- [UploadArtifactResponse](interfaces/UploadArtifactResponse.md)

### Variables

- [default](README.md#default)

## Variables

### default

• `Const` **default**: [`ArtifactClient`](interfaces/ArtifactClient.md)

#### Defined in

[src/artifact.ts:7](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/artifact.ts#L7)
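For orientation, a small usage sketch of the `default` export above; the listing call and logging are illustrative only:

```typescript
// The package default-exports a ready-made ArtifactClient instance,
// so constructing DefaultArtifactClient manually is optional.
import artifact from '@actions/artifact'

async function main(): Promise<void> {
  const {artifacts} = await artifact.listArtifacts()
  console.log(`Found ${artifacts.length} artifact(s) in this run`)
}

main()
```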
@@ -0,0 +1,169 @@
[@actions/artifact](../README.md) / ArtifactNotFoundError

# Class: ArtifactNotFoundError

## Hierarchy

- `Error`

  ↳ **`ArtifactNotFoundError`**

## Table of contents

### Constructors

- [constructor](ArtifactNotFoundError.md#constructor)

### Properties

- [message](ArtifactNotFoundError.md#message)
- [name](ArtifactNotFoundError.md#name)
- [stack](ArtifactNotFoundError.md#stack)
- [prepareStackTrace](ArtifactNotFoundError.md#preparestacktrace)
- [stackTraceLimit](ArtifactNotFoundError.md#stacktracelimit)

### Methods

- [captureStackTrace](ArtifactNotFoundError.md#capturestacktrace)

## Constructors

### constructor

• **new ArtifactNotFoundError**(`message?`): [`ArtifactNotFoundError`](ArtifactNotFoundError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'Artifact not found'` |

#### Returns

[`ArtifactNotFoundError`](ArtifactNotFoundError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L24)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
@ -0,0 +1,193 @@
|
|||
[@actions/artifact](../README.md) / DefaultArtifactClient
|
||||
|
||||
# Class: DefaultArtifactClient
|
||||
|
||||
The default artifact client that is used by the artifact action(s).
|
||||
|
||||
## Implements
|
||||
|
||||
- [`ArtifactClient`](../interfaces/ArtifactClient.md)
|
||||
|
||||
## Table of contents
|
||||
|
||||
### Constructors
|
||||
|
||||
- [constructor](DefaultArtifactClient.md#constructor)
|
||||
|
||||
### Methods
|
||||
|
||||
- [deleteArtifact](DefaultArtifactClient.md#deleteartifact)
|
||||
- [downloadArtifact](DefaultArtifactClient.md#downloadartifact)
|
||||
- [getArtifact](DefaultArtifactClient.md#getartifact)
|
||||
- [listArtifacts](DefaultArtifactClient.md#listartifacts)
|
||||
- [uploadArtifact](DefaultArtifactClient.md#uploadartifact)
|
||||
|
||||
## Constructors
|
||||
|
||||
### constructor
|
||||
|
||||
• **new DefaultArtifactClient**(): [`DefaultArtifactClient`](DefaultArtifactClient.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
[`DefaultArtifactClient`](DefaultArtifactClient.md)
|
||||
|
||||
## Methods
|
||||
|
||||
### deleteArtifact
|
||||
|
||||
▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>
|
||||
|
||||
Delete an Artifact
|
||||
|
||||
If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
| :------ | :------ | :------ |
|
||||
| `artifactName` | `string` | The name of the artifact to delete |
|
||||
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the delete behavior |
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>
|
||||
|
||||
single DeleteArtifactResponse object
|
||||
|
||||
#### Implementation of
|
||||
|
||||
[ArtifactClient](../interfaces/ArtifactClient.md).[deleteArtifact](../interfaces/ArtifactClient.md#deleteartifact)
|
||||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L248)
|
||||
|
||||
___
|
||||
|
||||
### downloadArtifact
|
||||
|
||||
▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>
|
||||
|
||||
Downloads an artifact and unzips the content.
|
||||
|
||||
If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
| :------ | :------ | :------ |
|
||||
| `artifactId` | `number` | The id of the artifact to download |
|
||||
| `options?` | [`DownloadArtifactOptions`](../interfaces/DownloadArtifactOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the download behavior |
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>
|
||||
|
||||
single DownloadArtifactResponse object
|
||||
|
||||
#### Implementation of
|
||||
|
||||
[ArtifactClient](../interfaces/ArtifactClient.md).[downloadArtifact](../interfaces/ArtifactClient.md#downloadartifact)
|
||||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L138)
|
||||
|
||||
___
|
||||
|
||||
### getArtifact
|
||||
|
||||
▸ **getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>
|
||||
|
||||
Finds an artifact by name.
|
||||
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
|
||||
If the artifact is not found, it will throw.
|
||||
|
||||
If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
|
||||
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
|
||||
`@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
|
||||
It is possible to have multiple artifacts with the same name in the same workflow run by using old versions of upload-artifact (v1,v2 and v3), @actions/artifact < v2 or it is a rerun.
|
||||
If there are multiple artifacts with the same name in the same workflow run this function will return the first artifact that matches the name.
#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the get behavior |

#### Returns

`Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[getArtifact](../interfaces/ArtifactClient.md#getartifact)

#### Defined in

[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L212)
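A minimal usage sketch based on the signature above; the artifact name is a hypothetical placeholder.

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

async function run(): Promise<void> {
  // Throws if no artifact with this name exists in the current run.
  const {artifact} = await client.getArtifact('my-artifact')
  console.log(`Found ${artifact.name} (id ${artifact.id}, ${artifact.size} bytes)`)
}
```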
___
### listArtifacts

▸ **listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>

Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.

If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](../interfaces/ListArtifactsOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the list behavior |

#### Returns

`Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>

single ListArtifactsResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[listArtifacts](../interfaces/ArtifactClient.md#listartifacts)

#### Defined in

[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L176)
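A minimal usage sketch based on the signature above.

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

async function run(): Promise<void> {
  // `latest: true` filters duplicates by name, keeping the most
  // recent artifact, which is useful on reruns.
  const {artifacts} = await client.listArtifacts({latest: true})
  for (const artifact of artifacts) {
    console.log(`${artifact.name} (id ${artifact.id}, ${artifact.size} bytes)`)
  }
}
```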
___
### uploadArtifact

▸ **uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>

Uploads an artifact.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](../interfaces/UploadArtifactOptions.md) | Extra options for customizing the upload behavior |

#### Returns

`Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>

single UploadArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[uploadArtifact](../interfaces/ArtifactClient.md#uploadartifact)

#### Defined in

[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L113)
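A minimal usage sketch based on the signature above; the artifact name and file paths are hypothetical placeholders.

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

async function run(): Promise<void> {
  const {id, size} = await client.uploadArtifact(
    'my-artifact', // artifact name
    ['dist/app.js', 'dist/app.js.map'], // files to include
    'dist', // root directory; upload paths are derived relative to it
    {retentionDays: 10} // optional behavior overrides
  )
  console.log(`Uploaded artifact ${id} (${size} bytes)`)
}
```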
@ -0,0 +1,180 @@
[@actions/artifact](../README.md) / FilesNotFoundError

# Class: FilesNotFoundError

## Hierarchy

- `Error`

  ↳ **`FilesNotFoundError`**

## Table of contents

### Constructors

- [constructor](FilesNotFoundError.md#constructor)

### Properties

- [files](FilesNotFoundError.md#files)
- [message](FilesNotFoundError.md#message)
- [name](FilesNotFoundError.md#name)
- [stack](FilesNotFoundError.md#stack)
- [prepareStackTrace](FilesNotFoundError.md#preparestacktrace)
- [stackTraceLimit](FilesNotFoundError.md#stacktracelimit)

### Methods

- [captureStackTrace](FilesNotFoundError.md#capturestacktrace)

## Constructors

### constructor

• **new FilesNotFoundError**(`files?`): [`FilesNotFoundError`](FilesNotFoundError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `files` | `string`[] | `[]` |

#### Returns

[`FilesNotFoundError`](FilesNotFoundError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L4)
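A minimal sketch of how this error might be handled; the artifact name and file path are hypothetical placeholders.

```typescript
import {DefaultArtifactClient, FilesNotFoundError} from '@actions/artifact'

const client = new DefaultArtifactClient()

async function run(): Promise<void> {
  try {
    await client.uploadArtifact('logs', ['missing.log'], '.')
  } catch (err) {
    if (err instanceof FilesNotFoundError) {
      // `files` lists the paths that could not be found on disk.
      console.error(`Missing files: ${err.files.join(', ')}`)
      return
    }
    throw err
  }
}
```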
## Properties

### files

• **files**: `string`[]

#### Defined in

[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L2)

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
@ -0,0 +1,169 @@
[@actions/artifact](../README.md) / GHESNotSupportedError

# Class: GHESNotSupportedError

## Hierarchy

- `Error`

  ↳ **`GHESNotSupportedError`**

## Table of contents

### Constructors

- [constructor](GHESNotSupportedError.md#constructor)

### Properties

- [message](GHESNotSupportedError.md#message)
- [name](GHESNotSupportedError.md#name)
- [stack](GHESNotSupportedError.md#stack)
- [prepareStackTrace](GHESNotSupportedError.md#preparestacktrace)
- [stackTraceLimit](GHESNotSupportedError.md#stacktracelimit)

### Methods

- [captureStackTrace](GHESNotSupportedError.md#capturestacktrace)

## Constructors

### constructor

• **new GHESNotSupportedError**(`message?`): [`GHESNotSupportedError`](GHESNotSupportedError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'` |

#### Returns

[`GHESNotSupportedError`](GHESNotSupportedError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L31)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
@ -0,0 +1,169 @@
[@actions/artifact](../README.md) / InvalidResponseError

# Class: InvalidResponseError

## Hierarchy

- `Error`

  ↳ **`InvalidResponseError`**

## Table of contents

### Constructors

- [constructor](InvalidResponseError.md#constructor)

### Properties

- [message](InvalidResponseError.md#message)
- [name](InvalidResponseError.md#name)
- [stack](InvalidResponseError.md#stack)
- [prepareStackTrace](InvalidResponseError.md#preparestacktrace)
- [stackTraceLimit](InvalidResponseError.md#stacktracelimit)

### Methods

- [captureStackTrace](InvalidResponseError.md#capturestacktrace)

## Constructors

### constructor

• **new InvalidResponseError**(`message`): [`InvalidResponseError`](InvalidResponseError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `message` | `string` |

#### Returns

[`InvalidResponseError`](InvalidResponseError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L17)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
@ -0,0 +1,201 @@
[@actions/artifact](../README.md) / NetworkError

# Class: NetworkError

## Hierarchy

- `Error`

  ↳ **`NetworkError`**

## Table of contents

### Constructors

- [constructor](NetworkError.md#constructor)

### Properties

- [code](NetworkError.md#code)
- [message](NetworkError.md#message)
- [name](NetworkError.md#name)
- [stack](NetworkError.md#stack)
- [prepareStackTrace](NetworkError.md#preparestacktrace)
- [stackTraceLimit](NetworkError.md#stacktracelimit)

### Methods

- [captureStackTrace](NetworkError.md#capturestacktrace)
- [isNetworkErrorCode](NetworkError.md#isnetworkerrorcode)

## Constructors

### constructor

• **new NetworkError**(`code`): [`NetworkError`](NetworkError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `code` | `string` |

#### Returns

[`NetworkError`](NetworkError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L42)

## Properties

### code

• **code**: `string`

#### Defined in

[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L40)

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isNetworkErrorCode

▸ **isNetworkErrorCode**(`code?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `code?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L49)
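A minimal sketch of how this helper might be used to classify a caught system error; the exact set of codes it matches is an implementation detail of the package.

```typescript
import {NetworkError} from '@actions/artifact'

// Hypothetical helper: decide whether a failed call is worth retrying
// based on the `code` of a NodeJS system error (e.g. ECONNRESET).
function isRetryable(err: NodeJS.ErrnoException): boolean {
  return NetworkError.isNetworkErrorCode(err.code)
}
```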
@ -0,0 +1,184 @@
[@actions/artifact](../README.md) / UsageError

# Class: UsageError

## Hierarchy

- `Error`

  ↳ **`UsageError`**

## Table of contents

### Constructors

- [constructor](UsageError.md#constructor)

### Properties

- [message](UsageError.md#message)
- [name](UsageError.md#name)
- [stack](UsageError.md#stack)
- [prepareStackTrace](UsageError.md#preparestacktrace)
- [stackTraceLimit](UsageError.md#stacktracelimit)

### Methods

- [captureStackTrace](UsageError.md#capturestacktrace)
- [isUsageErrorMessage](UsageError.md#isusageerrormessage)

## Constructors

### constructor

• **new UsageError**(): [`UsageError`](UsageError.md)

#### Returns

[`UsageError`](UsageError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L62)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isUsageErrorMessage

▸ **isUsageErrorMessage**(`msg?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `msg?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L68)
@ -0,0 +1,62 @@
[@actions/artifact](../README.md) / Artifact

# Interface: Artifact

An Actions Artifact

## Table of contents

### Properties

- [createdAt](Artifact.md#createdat)
- [id](Artifact.md#id)
- [name](Artifact.md#name)
- [size](Artifact.md#size)

## Properties

### createdAt

• `Optional` **createdAt**: `Date`

The time when the artifact was created

#### Defined in

[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L123)

___

### id

• **id**: `number`

The ID of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L113)

___

### name

• **name**: `string`

The name of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L108)

___

### size

• **size**: `number`

The size of the artifact in bytes

#### Defined in

[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L118)
@ -0,0 +1,159 @@
[@actions/artifact](../README.md) / ArtifactClient

# Interface: ArtifactClient

Generic interface for the artifact client.

## Implemented by

- [`DefaultArtifactClient`](../classes/DefaultArtifactClient.md)

## Table of contents

### Methods

- [deleteArtifact](ArtifactClient.md#deleteartifact)
- [downloadArtifact](ArtifactClient.md#downloadartifact)
- [getArtifact](ArtifactClient.md#getartifact)
- [listArtifacts](ArtifactClient.md#listartifacts)
- [uploadArtifact](ArtifactClient.md#uploadartifact)

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

Delete an Artifact

If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

single DeleteArtifactResponse object

#### Defined in

[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L103)

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

Downloads an artifact and unzips the content.

If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](DownloadArtifactOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

`Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

single DownloadArtifactResponse object

#### Defined in

[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L89)

___

### getArtifact

▸ **getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>

Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.

If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
It is still possible for multiple artifacts with the same name to exist in the same workflow run when old versions of upload-artifact (v1, v2, and v3) or `@actions/artifact` < v2 were used, or when the run is a rerun.
In that case, this function returns the first artifact that matches the name.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the get behavior |

#### Returns

`Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>

#### Defined in

[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L75)

___

### listArtifacts

▸ **listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>

Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.

If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](ListArtifactsOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the list behavior |

#### Returns

`Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>

single ListArtifactsResponse object

#### Defined in

[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L57)

___

### uploadArtifact

▸ **uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>

Uploads an artifact.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](UploadArtifactOptions.md) | Extra options for customizing the upload behavior |

#### Returns

`Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>

single UploadArtifactResponse object

#### Defined in

[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L40)
@ -0,0 +1,23 @@
[@actions/artifact](../README.md) / DeleteArtifactResponse

# Interface: DeleteArtifactResponse

Response from the server when deleting an artifact

## Table of contents

### Properties

- [id](DeleteArtifactResponse.md#id)

## Properties

### id

• **id**: `number`

The id of the artifact that was deleted

#### Defined in

[src/internal/shared/interfaces.ts:158](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L158)
@ -0,0 +1,23 @@
[@actions/artifact](../README.md) / DownloadArtifactOptions

# Interface: DownloadArtifactOptions

Options for downloading an artifact

## Table of contents

### Properties

- [path](DownloadArtifactOptions.md#path)

## Properties

### path

• `Optional` **path**: `string`

Denotes where the artifact will be downloaded to. If not specified, the artifact is downloaded to GITHUB_WORKSPACE

#### Defined in

[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L98)
@ -0,0 +1,23 @@
[@actions/artifact](../README.md) / DownloadArtifactResponse

# Interface: DownloadArtifactResponse

Response from the server when downloading an artifact

## Table of contents

### Properties

- [downloadPath](DownloadArtifactResponse.md#downloadpath)

## Properties

### downloadPath

• `Optional` **downloadPath**: `string`

The path where the artifact was downloaded to

#### Defined in

[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L88)
@ -0,0 +1,30 @@
[@actions/artifact](../README.md) / FindOptions

# Interface: FindOptions

## Table of contents

### Properties

- [findBy](FindOptions.md#findby)

## Properties

### findBy

• `Optional` **findBy**: `Object`

The criteria for finding Artifact(s) out of the scope of the current run.

#### Type declaration

| Name | Type | Description |
| :------ | :------ | :------ |
| `repositoryName` | `string` | Repository name (eg. 'toolkit') |
| `repositoryOwner` | `string` | Repository owner (eg. 'actions') |
| `token` | `string` | Token with actions:read permissions |
| `workflowRunId` | `number` | WorkflowRun of the artifact(s) to lookup |

#### Defined in

[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L131)
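A minimal sketch of `findBy` in use, based on the type declaration above; the token, run id, and repository values are hypothetical placeholders.

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

async function run(): Promise<void> {
  // Look up an artifact produced by a different workflow run.
  const {artifact} = await client.getArtifact('my-artifact', {
    findBy: {
      token: process.env.GITHUB_TOKEN as string, // needs actions:read
      workflowRunId: 123456789,
      repositoryOwner: 'actions',
      repositoryName: 'toolkit'
    }
  })
  console.log(`Found artifact id ${artifact.id}`)
}
```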
@ -0,0 +1,23 @@
[@actions/artifact](../README.md) / GetArtifactResponse

# Interface: GetArtifactResponse

Response from the server when getting an artifact

## Table of contents

### Properties

- [artifact](GetArtifactResponse.md#artifact)

## Properties

### artifact

• **artifact**: [`Artifact`](Artifact.md)

Metadata about the artifact that was found

#### Defined in

[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L57)
@ -0,0 +1,24 @@
[@actions/artifact](../README.md) / ListArtifactsOptions

# Interface: ListArtifactsOptions

Options for listing artifacts

## Table of contents

### Properties

- [latest](ListArtifactsOptions.md#latest)

## Properties

### latest

• `Optional` **latest**: `boolean`

Filter the workflow run's artifacts to the latest by name.
In the case of reruns, this can be useful to avoid duplicates.

#### Defined in

[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L68)
@ -0,0 +1,23 @@
[@actions/artifact](../README.md) / ListArtifactsResponse

# Interface: ListArtifactsResponse

Response from the server when listing artifacts

## Table of contents

### Properties

- [artifacts](ListArtifactsResponse.md#artifacts)

## Properties

### artifacts

• **artifacts**: [`Artifact`](Artifact.md)[]

A list of artifacts that were found

#### Defined in

[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L78)
@ -0,0 +1,55 @@
[@actions/artifact](../README.md) / UploadArtifactOptions

# Interface: UploadArtifactOptions

Options for uploading an artifact

## Table of contents

### Properties

- [compressionLevel](UploadArtifactOptions.md#compressionlevel)
- [retentionDays](UploadArtifactOptions.md#retentiondays)

## Properties

### compressionLevel

• `Optional` **compressionLevel**: `number`

The level of compression for Zlib to be applied to the artifact archive.
The value can range from 0 to 9:
- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression

Higher levels result in better compression, but take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.

#### Defined in

[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L47)

___

### retentionDays

• `Optional` **retentionDays**: `number`

Duration after which the artifact will expire, in days.

By default an artifact expires after 90 days:
https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete

Use this option to override the default expiry.

Min value: 1
Max value: 90 unless changed by repository setting

If this is set to a value greater than the retention setting allows, the retention on the artifact
will be reduced to match the maximum allowed on the server, and the upload process will continue. An
input of 0 assumes the default retention setting.

#### Defined in

[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L36)
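A minimal sketch combining both options, based on the declarations above; names and paths are hypothetical placeholders.

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

async function run(): Promise<void> {
  await client.uploadArtifact('binary-output', ['out/data.bin'], 'out', {
    compressionLevel: 0, // data is already compressed; skip zlib work
    retentionDays: 7 // expire after a week instead of the 90-day default
  })
}
```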
@ -0,0 +1,37 @@
[@actions/artifact](../README.md) / UploadArtifactResponse

# Interface: UploadArtifactResponse

Response from the server when an artifact is uploaded

## Table of contents

### Properties

- [id](UploadArtifactResponse.md#id)
- [size](UploadArtifactResponse.md#size)

## Properties

### id

• `Optional` **id**: `number`

The id of the artifact that was created. Not provided if no artifact was uploaded.
This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts

#### Defined in

[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L14)

___

### size

• `Optional` **size**: `number`

Total size of the artifact in bytes. Not provided if no artifact was uploaded.

#### Defined in

[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L8)
@ -1,57 +0,0 @@
# Implementation Details

Warning: Implementation details may change at any time without notice. This is meant to serve as a reference to help users understand the package.

## Upload/Compression flow

![image](https://user-images.githubusercontent.com/16109154/79765587-19522b00-8327-11ea-9679-410bb10e1b13.png)

During artifact upload, gzip is used to compress individual files before they are uploaded. This minimizes the amount of data that gets transferred, which reduces the total number of HTTP calls (the upload happens in 4MB chunks). The result is considerably faster uploads, with significant performance gains especially on self-hosted runners.

If a file is less than 64KB in size, a passthrough stream (readable and writable) is used to convert an in-memory buffer into a readable stream without any extra streams or piping.
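A minimal sketch of the buffer-to-stream conversion described above (not the package's actual internal code):

```typescript
import {PassThrough, Readable} from 'stream'

// For files under 64KB the contents are already in memory; ending a
// PassThrough with the buffer yields a Readable without extra piping.
function bufferToStream(buffer: Buffer): Readable {
  const stream = new PassThrough()
  stream.end(buffer)
  return stream
}
```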
## Retry Logic when downloading an individual file

![image](https://user-images.githubusercontent.com/16109154/78555461-5be71400-780d-11ea-9abd-b05b77a95a3f.png)

## Proxy support

This package uses the `@actions/http-client` NPM package internally, which supports proxied requests out of the box.

## HttpManager

### `keep-alive` header

When an HTTP call is made to upload or download an individual file, the server will close the HTTP connection after the upload/download is complete and respond with a header indicating `Connection: close`.

[HTTP closed connection header information](https://tools.ietf.org/html/rfc2616#section-14.10)

TCP connections are sometimes not immediately closed by the node client (Windows might hold on to the port for an extra period of time before actually releasing it, for example), and a large number of closed connections can cause port exhaustion before ports get released and become available again.

VMs hosted by GitHub Actions have 1024 available ports, so uploading 1000+ files very quickly can cause port exhaustion if connections get closed immediately. This can start to cause strange undefined behavior and timeouts.

In order for connections to not close immediately, the `keep-alive` header is used to indicate to the server that the connection should stay open. If a `keep-alive` header is used, the connection needs to be disposed of by calling `dispose()` on the `HttpClient`.

[`keep-alive` header information](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive)
[@actions/http-client client disposal](https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292)
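A minimal sketch of the pattern described above, assuming the client's `keepAlive` request option; the user agent and URL are hypothetical placeholders, and the real package manages its clients through the `HttpManager` rather than per call.

```typescript
import {HttpClient} from '@actions/http-client'

async function fetchWithKeepAlive(url: string): Promise<void> {
  // keepAlive asks the server to hold the TCP connection open
  // across calls instead of closing it immediately.
  const client = new HttpClient('artifact-example', [], {keepAlive: true})
  try {
    const response = await client.get(url)
    await response.readBody()
  } finally {
    // keep-alive connections must be released explicitly.
    client.dispose()
  }
}
```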
### Multiple HTTP clients

During an artifact upload or download, files are concurrently uploaded or downloaded using `async/await`. When an error or retry is encountered, the `HttpClient` that made the call is disposed of and a new one is created. If a single `HttpClient` were used for all HTTP calls and it had to be disposed, it could inadvertently affect any other calls happening concurrently.

Any other concurrent uploads or downloads should be left untouched. Because of this, each concurrent upload or download gets its own `HttpClient`. The `http-manager` is used to manage all available clients, and each concurrent upload or download maintains a `httpClientIndex` that keeps track of which client should be used (and potentially disposed and recycled if necessary).

### Potential resource leaks

When an HTTP response is received, it consists of two parts:
- `message`
- `body`

The `message` contains information such as the response code and headers, and it is available immediately. The body, however, is not available immediately; it can be read by calling `await response.readBody()`.

TCP connections consist of an input and output buffer to manage what is sent and received across a connection. If the body is not read (even if its contents are not needed), the buffers can stay in use even after `dispose()` gets called on the `HttpClient`. The buffers get released automatically after a certain period of time, but in order for them to be explicitly cleared, `readBody()` is always called.
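A minimal sketch of the rule above (always drain the body), with a hypothetical URL and user agent:

```typescript
import {HttpClient} from '@actions/http-client'

const client = new HttpClient('artifact-example')

async function fetchStatus(url: string): Promise<number> {
  const response = await client.get(url)
  // Drain the body even though only the status code is needed, so the
  // TCP buffers are released promptly rather than on a timeout.
  await response.readBody()
  return response.message.statusCode ?? 0
}
```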
### Non-concurrent calls

Both `upload-http-client` and `download-http-client` do not instantiate or create any HTTP clients (the `HttpManager` has that responsibility). If an HTTP call has to be made that does not require the `keep-alive` header (such as when calling `listArtifacts` or `patchArtifactSize`), the first `HttpClient` in the `HttpManager` is used. The number of available clients is equal to the upload or download concurrency, and there will always be at least one available.

File diff suppressed because it is too large
@ -1,6 +1,6 @@
 {
   "name": "@actions/artifact",
-  "version": "0.5.2",
+  "version": "2.1.4",
   "preview": true,
   "description": "Actions artifact lib",
   "keywords": [
@ -10,8 +10,8 @@
   ],
   "homepage": "https://github.com/actions/toolkit/tree/main/packages/artifact",
   "license": "MIT",
-  "main": "lib/artifact-client.js",
-  "types": "lib/artifact-client.d.ts",
+  "main": "lib/artifact.js",
+  "types": "lib/artifact.d.ts",
   "directories": {
     "lib": "lib",
     "test": "__tests__"
@ -30,20 +30,36 @@
   },
   "scripts": {
     "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
-    "test": "echo \"Error: run tests from root\" && exit 1",
-    "tsc": "tsc"
+    "test": "cd ../../ && npm run test ./packages/artifact",
+    "bootstrap": "cd ../../ && npm run bootstrap",
+    "tsc-run": "tsc",
+    "tsc": "npm run bootstrap && npm run tsc-run",
+    "gen:docs": "typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"
   },
   "bugs": {
     "url": "https://github.com/actions/toolkit/issues"
   },
   "dependencies": {
-    "@actions/core": "^1.2.6",
-    "@actions/http-client": "^1.0.11",
-    "@types/tmp": "^0.1.0",
-    "tmp": "^0.1.0",
-    "tmp-promise": "^2.0.2"
+    "@actions/core": "^1.10.0",
+    "@actions/github": "^5.1.1",
+    "@actions/http-client": "^2.1.0",
+    "@azure/storage-blob": "^12.15.0",
+    "@octokit/core": "^3.5.1",
+    "@octokit/plugin-request-log": "^1.0.4",
+    "@octokit/plugin-retry": "^3.0.9",
+    "@octokit/request-error": "^5.0.0",
+    "@protobuf-ts/plugin": "^2.2.3-alpha.1",
+    "archiver": "^5.3.1",
+    "crypto": "^1.0.1",
+    "jwt-decode": "^3.1.2",
+    "twirp-ts": "^2.5.0",
+    "unzip-stream": "^0.3.1"
   },
   "devDependencies": {
-    "typescript": "^3.8.3"
+    "@types/archiver": "^5.3.2",
+    "@types/unzip-stream": "^0.3.4",
+    "typedoc": "^0.25.4",
+    "typedoc-plugin-markdown": "^3.17.1",
+    "typescript": "^5.2.2"
   }
 }
@ -1,20 +0,0 @@
import {UploadOptions} from './internal/upload-options'
import {UploadResponse} from './internal/upload-response'
import {DownloadOptions} from './internal/download-options'
import {DownloadResponse} from './internal/download-response'
import {ArtifactClient, DefaultArtifactClient} from './internal/artifact-client'

export {
  ArtifactClient,
  UploadResponse,
  UploadOptions,
  DownloadResponse,
  DownloadOptions
}

/**
 * Constructs an ArtifactClient
 */
export function create(): ArtifactClient {
  return DefaultArtifactClient.create()
}
@ -0,0 +1,8 @@
import {ArtifactClient, DefaultArtifactClient} from './internal/client'

export * from './internal/shared/interfaces'
export * from './internal/shared/errors'
export * from './internal/client'

const client: ArtifactClient = new DefaultArtifactClient()
export default client
@ -0,0 +1,277 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { typeofJsonValue } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { PbLong } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
 * A Timestamp represents a point in time independent of any time zone
 * or calendar, represented as seconds and fractions of seconds at
 * nanosecond resolution in UTC Epoch time. It is encoded using the
 * Proleptic Gregorian Calendar which extends the Gregorian calendar
 * backwards to year one. It is encoded assuming all minutes are 60
 * seconds long, i.e. leap seconds are "smeared" so that no leap second
 * table is needed for interpretation. Range is from
 * 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
 * By restricting to that range, we ensure that we can convert to
 * and from RFC 3339 date strings.
 * See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
 *
 * # Examples
 *
 * Example 1: Compute Timestamp from POSIX `time()`.
 *
 *     Timestamp timestamp;
 *     timestamp.set_seconds(time(NULL));
 *     timestamp.set_nanos(0);
 *
 * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
 *
 *     struct timeval tv;
 *     gettimeofday(&tv, NULL);
 *
 *     Timestamp timestamp;
 *     timestamp.set_seconds(tv.tv_sec);
 *     timestamp.set_nanos(tv.tv_usec * 1000);
 *
 * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
 *
 *     FILETIME ft;
 *     GetSystemTimeAsFileTime(&ft);
 *     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
 *
 *     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
 *     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
 *     Timestamp timestamp;
 *     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
 *     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
 *
 * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
 *
 *     long millis = System.currentTimeMillis();
 *
 *     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
 *         .setNanos((int) ((millis % 1000) * 1000000)).build();
 *
 * Example 5: Compute Timestamp from current time in Python.
 *
 *     timestamp = Timestamp()
 *     timestamp.GetCurrentTime()
 *
 * # JSON Mapping
 *
 * In JSON format, the Timestamp type is encoded as a string in the
 * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
 * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
 * where {year} is always expressed using four digits while {month}, {day},
 * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
 * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
 * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
 * is required. A proto3 JSON serializer should always use UTC (as indicated by
 * "Z") when printing the Timestamp type and a proto3 JSON parser should be
 * able to accept both UTC and other timezones (as indicated by an offset).
 *
 * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
 * 01:30 UTC on January 15, 2017.
 *
 * In JavaScript, one can convert a Date object to this format using the
 * standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
 * method. In Python, a standard `datetime.datetime` object can be converted
 * to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
 * with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
 * can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
 * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
 * ) to obtain a formatter capable of generating timestamps in this format.
 *
 * @generated from protobuf message google.protobuf.Timestamp
 */
export interface Timestamp {
    /**
     * Represents seconds of UTC time since Unix epoch
     * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
     * 9999-12-31T23:59:59Z inclusive.
     *
     * @generated from protobuf field: int64 seconds = 1;
     */
    seconds: string;
    /**
     * Non-negative fractions of a second at nanosecond resolution. Negative
     * second values with fractions must still have non-negative nanos values
     * that count forward in time. Must be from 0 to 999,999,999
     * inclusive.
     *
     * @generated from protobuf field: int32 nanos = 2;
     */
    nanos: number;
}
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends MessageType<Timestamp> {
    constructor() {
        super("google.protobuf.Timestamp", [
            { no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
    /**
     * Creates a new `Timestamp` for the current time.
     */
    now(): Timestamp {
        const msg = this.create();
        const ms = Date.now();
        msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
        msg.nanos = (ms % 1000) * 1000000;
        return msg;
    }
    /**
     * Converts a `Timestamp` to a JavaScript Date.
     */
    toDate(message: Timestamp): Date {
        return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
    }
    /**
     * Converts a JavaScript Date to a `Timestamp`.
     */
    fromDate(date: Date): Timestamp {
        const msg = this.create();
        const ms = date.getTime();
        msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
        msg.nanos = (ms % 1000) * 1000000;
        return msg;
    }
    /**
     * In JSON format, the `Timestamp` type is encoded as a string
     * in the RFC 3339 format.
     */
    internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue {
        let ms = PbLong.from(message.seconds).toNumber() * 1000;
        if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
            throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
        if (message.nanos < 0)
            throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
        let z = "Z";
        if (message.nanos > 0) {
            let nanosStr = (message.nanos + 1000000000).toString().substring(1);
            if (nanosStr.substring(3) === "000000")
                z = "." + nanosStr.substring(0, 3) + "Z";
            else if (nanosStr.substring(6) === "000")
                z = "." + nanosStr.substring(0, 6) + "Z";
            else
                z = "." + nanosStr + "Z";
        }
        return new Date(ms).toISOString().replace(".000Z", z);
    }
    /**
     * In JSON format, the `Timestamp` type is encoded as a string
     * in the RFC 3339 format.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp {
        if (typeof json !== "string")
            throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + ".");
        let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
        if (!matches)
            throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
        let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
        if (Number.isNaN(ms))
            throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
        if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
            throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
        if (!target)
            target = this.create();
        target.seconds = PbLong.from(ms / 1000).toString();
        target.nanos = 0;
        if (matches[7])
            target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
        return target;
    }
    create(value?: PartialMessage<Timestamp>): Timestamp {
        const message = { seconds: "0", nanos: 0 };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Timestamp>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 seconds */ 1:
|
||||
message.seconds = reader.int64().toString();
|
||||
break;
|
||||
case /* int32 nanos */ 2:
|
||||
message.nanos = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int64 seconds = 1; */
|
||||
if (message.seconds !== "0")
|
||||
writer.tag(1, WireType.Varint).int64(message.seconds);
|
||||
/* int32 nanos = 2; */
|
||||
if (message.nanos !== 0)
|
||||
writer.tag(2, WireType.Varint).int32(message.nanos);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
export const Timestamp = new Timestamp$Type();
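// A minimal usage sketch for the exported `Timestamp` handler above. The
// helpers `now()`, `fromDate()` and `toDate()` are the methods defined on
// Timestamp$Type; `toJson()` comes from the MessageType base class. Because
// this file was generated with `long_type_string`, `seconds` is a decimal
// string rather than a bigint:
//
//   const ts = Timestamp.fromDate(new Date("2017-01-15T01:30:15.010Z"));
//   // ts.seconds === "1484443815", ts.nanos === 10000000
//   Timestamp.toJson(ts);  // "2017-01-15T01:30:15.010Z" (RFC 3339, UTC)
//   Timestamp.toDate(ts);  // back to an equivalent JavaScript Date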
|
|
@@ -0,0 +1,748 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
//
|
||||
// Wrappers for primitive (non-message) types. These types are useful
|
||||
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||
// where we need to distinguish between the absence of a primitive
|
||||
// typed field and its default value.
|
||||
//
|
||||
import { ScalarType } from "@protobuf-ts/runtime";
|
||||
import { LongType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* Wrapper message for `double`.
|
||||
*
|
||||
* The JSON representation for `DoubleValue` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
export interface DoubleValue {
|
||||
/**
|
||||
* The double value.
|
||||
*
|
||||
* @generated from protobuf field: double value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `float`.
|
||||
*
|
||||
* The JSON representation for `FloatValue` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
export interface FloatValue {
|
||||
/**
|
||||
* The float value.
|
||||
*
|
||||
* @generated from protobuf field: float value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `int64`.
|
||||
*
|
||||
* The JSON representation for `Int64Value` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
export interface Int64Value {
|
||||
/**
|
||||
* The int64 value.
|
||||
*
|
||||
* @generated from protobuf field: int64 value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `uint64`.
|
||||
*
|
||||
* The JSON representation for `UInt64Value` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
export interface UInt64Value {
|
||||
/**
|
||||
* The uint64 value.
|
||||
*
|
||||
* @generated from protobuf field: uint64 value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `int32`.
|
||||
*
|
||||
* The JSON representation for `Int32Value` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
export interface Int32Value {
|
||||
/**
|
||||
* The int32 value.
|
||||
*
|
||||
* @generated from protobuf field: int32 value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `uint32`.
|
||||
*
|
||||
* The JSON representation for `UInt32Value` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
export interface UInt32Value {
|
||||
/**
|
||||
* The uint32 value.
|
||||
*
|
||||
* @generated from protobuf field: uint32 value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `bool`.
|
||||
*
|
||||
* The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
export interface BoolValue {
|
||||
/**
|
||||
* The bool value.
|
||||
*
|
||||
* @generated from protobuf field: bool value = 1;
|
||||
*/
|
||||
value: boolean;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `string`.
|
||||
*
|
||||
* The JSON representation for `StringValue` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
export interface StringValue {
|
||||
/**
|
||||
* The string value.
|
||||
*
|
||||
* @generated from protobuf field: string value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `bytes`.
|
||||
*
|
||||
* The JSON representation for `BytesValue` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
export interface BytesValue {
|
||||
/**
|
||||
* The bytes value.
|
||||
*
|
||||
* @generated from protobuf field: bytes value = 1;
|
||||
*/
|
||||
value: Uint8Array;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DoubleValue$Type extends MessageType<DoubleValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.DoubleValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `DoubleValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `DoubleValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<DoubleValue>): DoubleValue {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<DoubleValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* double value */ 1:
|
||||
message.value = reader.double();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* double value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Bit64).double(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
export const DoubleValue = new DoubleValue$Type();
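// Usage sketch: the wrapper serializes to a bare JSON number, not an object,
// matching the interface comment above:
//
//   DoubleValue.toJson(DoubleValue.create({ value: 0.5 }));  // 0.5, not { "value": 0.5 }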
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FloatValue$Type extends MessageType<FloatValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.FloatValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `FloatValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `FloatValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 2, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<FloatValue>): FloatValue {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FloatValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* float value */ 1:
|
||||
message.value = reader.float();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* float value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Bit32).float(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
export const FloatValue = new FloatValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int64Value$Type extends MessageType<Int64Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.Int64Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<Int64Value>): Int64Value {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<Int64Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 value */ 1:
|
||||
message.value = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, WireType.Varint).int64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
export const Int64Value = new Int64Value$Type();
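// Usage sketch: with `long_type_string`, 64-bit values are decimal strings in
// TypeScript and JSON strings on the wire, so values beyond 2^53 stay exact:
//
//   const v = Int64Value.create({ value: "9007199254740993" });
//   Int64Value.toJson(v);  // "9007199254740993" (a JSON string)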
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt64Value$Type extends MessageType<UInt64Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt64Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<UInt64Value>): UInt64Value {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<UInt64Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint64 value */ 1:
|
||||
message.value = reader.uint64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* uint64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, WireType.Varint).uint64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
export const UInt64Value = new UInt64Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int32Value$Type extends MessageType<Int32Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.Int32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int32Value` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int32Value` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<Int32Value>): Int32Value {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<Int32Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int32 value */ 1:
|
||||
message.value = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Varint).int32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
export const Int32Value = new Int32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt32Value$Type extends MessageType<UInt32Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt32Value` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt32Value` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<UInt32Value>): UInt32Value {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<UInt32Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint32 value */ 1:
|
||||
message.value = reader.uint32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* uint32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Varint).uint32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
export const UInt32Value = new UInt32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BoolValue$Type extends MessageType<BoolValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.BoolValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BoolValue` to JSON bool.
|
||||
*/
|
||||
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `BoolValue` from JSON bool.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<BoolValue>): BoolValue {
|
||||
const message = { value: false };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<BoolValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool value */ 1:
|
||||
message.value = reader.bool();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool value = 1; */
|
||||
if (message.value !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
export const BoolValue = new BoolValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class StringValue$Type extends MessageType<StringValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.StringValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `StringValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `StringValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<StringValue>): StringValue {
|
||||
const message = { value: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<StringValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string value */ 1:
|
||||
message.value = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string value = 1; */
|
||||
if (message.value !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
export const StringValue = new StringValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BytesValue$Type extends MessageType<BytesValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.BytesValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BytesValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `BytesValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<BytesValue>): BytesValue {
|
||||
const message = { value: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<BytesValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes value */ 1:
|
||||
message.value = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bytes value = 1; */
|
||||
if (message.value.length)
|
||||
writer.tag(1, WireType.LengthDelimited).bytes(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
export const BytesValue = new BytesValue$Type();
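// Sketch of the point made in this file's header comment: used as message
// fields, these wrappers distinguish "unset" from the scalar default. Both
// values below are hypothetical:
//
//   const unset: StringValue | undefined = undefined;  // field absent
//   const empty = StringValue.create({ value: "" });   // explicitly ""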
|
|
@@ -0,0 +1,4 @@
|
|||
export * from './google/protobuf/timestamp'
|
||||
export * from './google/protobuf/wrappers'
|
||||
export * from './results/api/v1/artifact'
|
||||
export * from './results/api/v1/artifact.twirp'
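// With this barrel file, consumers can import any generated type from one
// place, e.g. (the relative path is illustrative):
//
//   import { Timestamp, StringValue, CreateArtifactRequest } from "../generated"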
|
|
@@ -0,0 +1,916 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { Int64Value } from "../../../google/protobuf/wrappers";
|
||||
import { StringValue } from "../../../google/protobuf/wrappers";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
export interface CreateArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 4;
|
||||
*/
|
||||
expiresAt?: Timestamp;
|
||||
/**
|
||||
* @generated from protobuf field: int32 version = 5;
|
||||
*/
|
||||
version: number;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactResponse
|
||||
*/
|
||||
export interface CreateArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: string signed_upload_url = 2;
|
||||
*/
|
||||
signedUploadUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
|
||||
*/
|
||||
export interface FinalizeArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: int64 size = 4;
|
||||
*/
|
||||
size: string;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.StringValue hash = 5;
|
||||
*/
|
||||
hash?: StringValue; // optional
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
export interface FinalizeArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export interface ListArtifactsRequest {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Name of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
|
||||
*/
|
||||
nameFilter?: StringValue; // optional
|
||||
/**
|
||||
* Monolith Database ID of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
|
||||
*/
|
||||
idFilter?: Int64Value; // optional
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
export interface ListArtifactsResponse {
|
||||
/**
|
||||
* @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
|
||||
*/
|
||||
artifacts: ListArtifactsResponse_MonolithArtifact[];
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
export interface ListArtifactsResponse_MonolithArtifact {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Monolith database ID of the artifact
|
||||
*
|
||||
* @generated from protobuf field: int64 database_id = 3;
|
||||
*/
|
||||
databaseId: string;
|
||||
/**
|
||||
* Name of the artifact
|
||||
*
|
||||
* @generated from protobuf field: string name = 4;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* Size of the artifact in bytes
|
||||
*
|
||||
* @generated from protobuf field: int64 size = 5;
|
||||
*/
|
||||
size: string;
|
||||
/**
|
||||
* When the artifact was created in the monolith
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
|
||||
*/
|
||||
createdAt?: Timestamp;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
export interface GetSignedArtifactURLRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
export interface GetSignedArtifactURLResponse {
|
||||
/**
|
||||
* @generated from protobuf field: string signed_url = 1;
|
||||
*/
|
||||
signedUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
export interface DeleteArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
export interface DeleteArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "expires_at", kind: "message", T: () => Timestamp },
|
||||
{ no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<CreateArtifactRequest>): CreateArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", version: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<CreateArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactRequest): CreateArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp expires_at */ 4:
|
||||
message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||
break;
|
||||
case /* int32 version */ 5:
|
||||
message.version = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: CreateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.name);
|
||||
/* google.protobuf.Timestamp expires_at = 4; */
|
||||
if (message.expiresAt)
|
||||
Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(4, WireType.LengthDelimited).fork(), options).join();
|
||||
/* int32 version = 5; */
|
||||
if (message.version !== 0)
|
||||
writer.tag(5, WireType.Varint).int32(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
export const CreateArtifactRequest = new CreateArtifactRequest$Type();
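// Usage sketch: `create()` fills the defaults shown above, so a caller only
// sets what differs (the IDs and the 24-hour expiry below are placeholders):
//
//   const req = CreateArtifactRequest.create({
//       workflowRunBackendId: "run-id",
//       workflowJobRunBackendId: "job-id",
//       name: "my-artifact",
//       version: 4,
//       expiresAt: Timestamp.fromDate(new Date(Date.now() + 24 * 60 * 60 * 1000))
//   });
//   const bytes = CreateArtifactRequest.toBinary(req);  // Uint8Array for the wire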
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactResponse$Type extends MessageType<CreateArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<CreateArtifactResponse>): CreateArtifactResponse {
|
||||
const message = { ok: false, signedUploadUrl: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<CreateArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactResponse): CreateArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_upload_url */ 2:
|
||||
message.signedUploadUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: CreateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* string signed_upload_url = 2; */
|
||||
if (message.signedUploadUrl !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactResponse
|
||||
*/
|
||||
export const CreateArtifactResponse = new CreateArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 5, name: "hash", kind: "message", T: () => StringValue }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeArtifactRequest>): FinalizeArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactRequest): FinalizeArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 4:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
case /* google.protobuf.StringValue hash */ 5:
|
||||
message.hash = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.hash);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 4; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(4, WireType.Varint).int64(message.size);
|
||||
/* google.protobuf.StringValue hash = 5; */
|
||||
if (message.hash)
|
||||
StringValue.internalBinaryWrite(message.hash, writer.tag(5, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
|
||||
*/
|
||||
export const FinalizeArtifactRequest = new FinalizeArtifactRequest$Type();
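// Usage sketch: `size` is an int64 and therefore a decimal string here, while
// the optional `hash` rides in a StringValue wrapper (placeholder values):
//
//   const fin = FinalizeArtifactRequest.create({
//       workflowRunBackendId: "run-id",
//       workflowJobRunBackendId: "job-id",
//       name: "my-artifact",
//       size: "1048576",
//       hash: StringValue.create({ value: "sha256:..." })
//   });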
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeArtifactResponse>): FinalizeArtifactResponse {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactResponse): FinalizeArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
export const FinalizeArtifactResponse = new FinalizeArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name_filter", kind: "message", T: () => StringValue },
            { no: 4, name: "id_filter", kind: "message", T: () => Int64Value }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* google.protobuf.StringValue name_filter */ 3:
                    message.nameFilter = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter);
                    break;
                case /* google.protobuf.Int64Value id_filter */ 4:
                    message.idFilter = Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* google.protobuf.StringValue name_filter = 3; */
        if (message.nameFilter)
            StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Int64Value id_filter = 4; */
        if (message.idFilter)
            Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
 */
export const ListArtifactsRequest = new ListArtifactsRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsResponse", [
            { no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse {
        const message = { artifacts: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ 1:
                    message.artifacts.push(ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1; */
        for (let i = 0; i < message.artifacts.length; i++)
            ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
 */
export const ListArtifactsResponse = new ListArtifactsResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsResponse_MonolithArtifact>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* int64 database_id */ 3:
                    message.databaseId = reader.int64().toString();
                    break;
                case /* string name */ 4:
                    message.name = reader.string();
                    break;
                case /* int64 size */ 5:
                    message.size = reader.int64().toString();
                    break;
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* int64 database_id = 3; */
        if (message.databaseId !== "0")
            writer.tag(3, WireType.Varint).int64(message.databaseId);
        /* string name = 4; */
        if (message.name !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.name);
        /* int64 size = 5; */
        if (message.size !== "0")
            writer.tag(5, WireType.Varint).int64(message.size);
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export const ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
    constructor() {
        super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetSignedArtifactURLRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export const GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
    constructor() {
        super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [
            { no: 1, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse {
        const message = { signedUrl: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetSignedArtifactURLResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string signed_url */ 1:
                    message.signedUrl = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string signed_url = 1; */
        if (message.signedUrl !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.signedUrl);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export const GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
    constructor() {
        super("github.actions.results.api.v1.DeleteArtifactRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<DeleteArtifactRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export const DeleteArtifactRequest = new DeleteArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
    constructor() {
        super("github.actions.results.api.v1.DeleteArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse {
        const message = { ok: false, artifactId: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<DeleteArtifactResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 artifact_id */ 2:
                    message.artifactId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, WireType.Varint).bool(message.ok);
        /* int64 artifact_id = 2; */
        if (message.artifactId !== "0")
            writer.tag(2, WireType.Varint).int64(message.artifactId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export const DeleteArtifactResponse = new DeleteArtifactResponse$Type();
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
export const ArtifactService = new ServiceType("github.actions.results.api.v1.ArtifactService", [
    { name: "CreateArtifact", options: {}, I: CreateArtifactRequest, O: CreateArtifactResponse },
    { name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
    { name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
    { name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse },
    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse }
]);
@@ -0,0 +1,976 @@
import {
  TwirpContext,
  TwirpServer,
  RouterEvents,
  TwirpError,
  TwirpErrorCode,
  Interceptor,
  TwirpContentType,
  chainInterceptors,
} from "twirp-ts";
import {
  CreateArtifactRequest,
  CreateArtifactResponse,
  FinalizeArtifactRequest,
  FinalizeArtifactResponse,
  ListArtifactsRequest,
  ListArtifactsResponse,
  GetSignedArtifactURLRequest,
  GetSignedArtifactURLResponse,
  DeleteArtifactRequest,
  DeleteArtifactResponse,
} from "./artifact";

//==================================//
//          Client Code             //
//==================================//
interface Rpc {
  request(
    service: string,
    method: string,
    contentType: "application/json" | "application/protobuf",
    data: object | Uint8Array
  ): Promise<object | Uint8Array>;
}

export interface ArtifactServiceClient {
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse>;
  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse>;
  ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
  GetSignedArtifactURL(
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse>;
  DeleteArtifact(
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse>;
}
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
  private readonly rpc: Rpc;
  constructor(rpc: Rpc) {
    this.rpc = rpc;
    this.CreateArtifact.bind(this);
    this.FinalizeArtifact.bind(this);
    this.ListArtifacts.bind(this);
    this.GetSignedArtifactURL.bind(this);
    this.DeleteArtifact.bind(this);
  }
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse> {
    const data = CreateArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "CreateArtifact",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      CreateArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }

  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse> {
    const data = FinalizeArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "FinalizeArtifact",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      FinalizeArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }

  ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
    const data = ListArtifactsRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "ListArtifacts",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
    );
  }

  GetSignedArtifactURL(
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse> {
    const data = GetSignedArtifactURLRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "GetSignedArtifactURL",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      GetSignedArtifactURLResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }

  DeleteArtifact(
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse> {
    const data = DeleteArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "DeleteArtifact",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      DeleteArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }
}
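// A minimal transport sketch, not part of the generated file: one way to satisfy
// the Rpc interface with the global fetch API. The base URL below is a
// hypothetical placeholder; real callers derive the endpoint and credentials
// from the runner environment.
const exampleRpc: Rpc = {
  async request(service, method, contentType, data) {
    const response = await fetch(
      `https://results.example.com/twirp/${service}/${method}`,
      {
        method: "POST",
        headers: { "Content-Type": contentType },
        // JSON bodies are serialized; protobuf bodies are sent as raw bytes.
        body:
          contentType === "application/json"
            ? JSON.stringify(data)
            : (data as Uint8Array),
      }
    );
    return contentType === "application/json"
      ? ((await response.json()) as object)
      : new Uint8Array(await response.arrayBuffer());
  },
};

// Usage sketch: the JSON client defined above rides on that transport.
async function listArtifactsExample(): Promise<ListArtifactsResponse> {
  const client = new ArtifactServiceClientJSON(exampleRpc);
  return client.ListArtifacts({
    workflowRunBackendId: "run-backend-id", // placeholder values
    workflowJobRunBackendId: "job-backend-id",
  });
}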
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
  private readonly rpc: Rpc;
  constructor(rpc: Rpc) {
    this.rpc = rpc;
    this.CreateArtifact.bind(this);
    this.FinalizeArtifact.bind(this);
    this.ListArtifacts.bind(this);
    this.GetSignedArtifactURL.bind(this);
    this.DeleteArtifact.bind(this);
  }
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse> {
    const data = CreateArtifactRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "CreateArtifact",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      CreateArtifactResponse.fromBinary(data as Uint8Array)
    );
  }

  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse> {
    const data = FinalizeArtifactRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "FinalizeArtifact",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      FinalizeArtifactResponse.fromBinary(data as Uint8Array)
    );
  }

  ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
    const data = ListArtifactsRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "ListArtifacts",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      ListArtifactsResponse.fromBinary(data as Uint8Array)
    );
  }

  GetSignedArtifactURL(
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse> {
    const data = GetSignedArtifactURLRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "GetSignedArtifactURL",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
    );
  }

  DeleteArtifact(
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse> {
    const data = DeleteArtifactRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "DeleteArtifact",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      DeleteArtifactResponse.fromBinary(data as Uint8Array)
    );
  }
}
//==================================//
//          Server Code             //
//==================================//

export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
  CreateArtifact(
    ctx: T,
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse>;
  FinalizeArtifact(
    ctx: T,
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse>;
  ListArtifacts(
    ctx: T,
    request: ListArtifactsRequest
  ): Promise<ListArtifactsResponse>;
  GetSignedArtifactURL(
    ctx: T,
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse>;
  DeleteArtifact(
    ctx: T,
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse>;
}
export enum ArtifactServiceMethod {
  CreateArtifact = "CreateArtifact",
  FinalizeArtifact = "FinalizeArtifact",
  ListArtifacts = "ListArtifacts",
  GetSignedArtifactURL = "GetSignedArtifactURL",
  DeleteArtifact = "DeleteArtifact",
}

export const ArtifactServiceMethodList = [
  ArtifactServiceMethod.CreateArtifact,
  ArtifactServiceMethod.FinalizeArtifact,
  ArtifactServiceMethod.ListArtifacts,
  ArtifactServiceMethod.GetSignedArtifactURL,
  ArtifactServiceMethod.DeleteArtifact,
];
export function createArtifactServiceServer<
  T extends TwirpContext = TwirpContext
>(service: ArtifactServiceTwirp<T>) {
  return new TwirpServer<ArtifactServiceTwirp, T>({
    service,
    packageName: "github.actions.results.api.v1",
    serviceName: "ArtifactService",
    methodList: ArtifactServiceMethodList,
    matchRoute: matchArtifactServiceRoute,
  });
}
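// A minimal hosting sketch, not part of the generated file. It assumes
// twirp-ts's TwirpServer exposes an httpHandler() request listener, as in
// that library's documented usage; every method below is a stub that rejects
// as unimplemented, and the port is a placeholder.
//
//   import * as http from "http";
//
//   const server = createArtifactServiceServer({
//     async CreateArtifact() { throw new TwirpError(TwirpErrorCode.Unimplemented, "todo"); },
//     async FinalizeArtifact() { throw new TwirpError(TwirpErrorCode.Unimplemented, "todo"); },
//     async ListArtifacts() { throw new TwirpError(TwirpErrorCode.Unimplemented, "todo"); },
//     async GetSignedArtifactURL() { throw new TwirpError(TwirpErrorCode.Unimplemented, "todo"); },
//     async DeleteArtifact() { throw new TwirpError(TwirpErrorCode.Unimplemented, "todo"); },
//   });
//   http.createServer(server.httpHandler()).listen(8080);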
function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
  method: string,
  events: RouterEvents<T>
) {
  switch (method) {
    case "CreateArtifact":
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          CreateArtifactRequest,
          CreateArtifactResponse
        >[]
      ) => {
        ctx = { ...ctx, methodName: "CreateArtifact" };
        await events.onMatch(ctx);
        return handleArtifactServiceCreateArtifactRequest(
          ctx,
          service,
          data,
          interceptors
        );
      };
    case "FinalizeArtifact":
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          FinalizeArtifactRequest,
          FinalizeArtifactResponse
        >[]
      ) => {
        ctx = { ...ctx, methodName: "FinalizeArtifact" };
        await events.onMatch(ctx);
        return handleArtifactServiceFinalizeArtifactRequest(
          ctx,
          service,
          data,
          interceptors
        );
      };
    case "ListArtifacts":
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          ListArtifactsRequest,
          ListArtifactsResponse
        >[]
      ) => {
        ctx = { ...ctx, methodName: "ListArtifacts" };
        await events.onMatch(ctx);
        return handleArtifactServiceListArtifactsRequest(
          ctx,
          service,
          data,
          interceptors
        );
      };
    case "GetSignedArtifactURL":
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          GetSignedArtifactURLRequest,
          GetSignedArtifactURLResponse
        >[]
      ) => {
        ctx = { ...ctx, methodName: "GetSignedArtifactURL" };
        await events.onMatch(ctx);
        return handleArtifactServiceGetSignedArtifactURLRequest(
          ctx,
          service,
          data,
          interceptors
        );
      };
    case "DeleteArtifact":
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          DeleteArtifactRequest,
          DeleteArtifactResponse
        >[]
      ) => {
        ctx = { ...ctx, methodName: "DeleteArtifact" };
        await events.onMatch(ctx);
        return handleArtifactServiceDeleteArtifactRequest(
          ctx,
          service,
          data,
          interceptors
        );
      };
    default:
      events.onNotFound();
      const msg = `no handler found`;
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}
function handleArtifactServiceCreateArtifactRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceCreateArtifactJSON<T>(
        ctx,
        service,
        data,
        interceptors
      );
    case TwirpContentType.Protobuf:
      return handleArtifactServiceCreateArtifactProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      );
    default:
      const msg = "unexpected Content-Type";
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}

function handleArtifactServiceFinalizeArtifactRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceFinalizeArtifactJSON<T>(
        ctx,
        service,
        data,
        interceptors
      );
    case TwirpContentType.Protobuf:
      return handleArtifactServiceFinalizeArtifactProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      );
    default:
      const msg = "unexpected Content-Type";
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}

function handleArtifactServiceListArtifactsRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceListArtifactsJSON<T>(
        ctx,
        service,
        data,
        interceptors
      );
    case TwirpContentType.Protobuf:
      return handleArtifactServiceListArtifactsProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      );
    default:
      const msg = "unexpected Content-Type";
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}

function handleArtifactServiceGetSignedArtifactURLRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    GetSignedArtifactURLRequest,
    GetSignedArtifactURLResponse
  >[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceGetSignedArtifactURLJSON<T>(
        ctx,
        service,
        data,
        interceptors
      );
    case TwirpContentType.Protobuf:
      return handleArtifactServiceGetSignedArtifactURLProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      );
    default:
      const msg = "unexpected Content-Type";
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}

function handleArtifactServiceDeleteArtifactRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceDeleteArtifactJSON<T>(
        ctx,
        service,
        data,
        interceptors
      );
    case TwirpContentType.Protobuf:
      return handleArtifactServiceDeleteArtifactProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      );
    default:
      const msg = "unexpected Content-Type";
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}
async function handleArtifactServiceCreateArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
  let request: CreateArtifactRequest;
  let response: CreateArtifactResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = CreateArtifactRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      CreateArtifactRequest,
      CreateArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.CreateArtifact(ctx, inputReq);
    });
  } else {
    response = await service.CreateArtifact(ctx, request!);
  }

  return JSON.stringify(
    CreateArtifactResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceFinalizeArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
) {
  let request: FinalizeArtifactRequest;
  let response: FinalizeArtifactResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = FinalizeArtifactRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      FinalizeArtifactRequest,
      FinalizeArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.FinalizeArtifact(ctx, inputReq);
    });
  } else {
    response = await service.FinalizeArtifact(ctx, request!);
  }

  return JSON.stringify(
    FinalizeArtifactResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceListArtifactsJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
) {
  let request: ListArtifactsRequest;
  let response: ListArtifactsResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = ListArtifactsRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      ListArtifactsRequest,
      ListArtifactsResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.ListArtifacts(ctx, inputReq);
    });
  } else {
    response = await service.ListArtifacts(ctx, request!);
  }

  return JSON.stringify(
    ListArtifactsResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceGetSignedArtifactURLJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    GetSignedArtifactURLRequest,
    GetSignedArtifactURLResponse
  >[]
) {
  let request: GetSignedArtifactURLRequest;
  let response: GetSignedArtifactURLResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = GetSignedArtifactURLRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      GetSignedArtifactURLRequest,
      GetSignedArtifactURLResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.GetSignedArtifactURL(ctx, inputReq);
    });
  } else {
    response = await service.GetSignedArtifactURL(ctx, request!);
  }

  return JSON.stringify(
    GetSignedArtifactURLResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceDeleteArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
) {
  let request: DeleteArtifactRequest;
  let response: DeleteArtifactResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = DeleteArtifactRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      DeleteArtifactRequest,
      DeleteArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.DeleteArtifact(ctx, inputReq);
    });
  } else {
    response = await service.DeleteArtifact(ctx, request!);
  }

  return JSON.stringify(
    DeleteArtifactResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}
async function handleArtifactServiceCreateArtifactProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
  let request: CreateArtifactRequest;
  let response: CreateArtifactResponse;

  try {
    request = CreateArtifactRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      CreateArtifactRequest,
      CreateArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.CreateArtifact(ctx, inputReq);
    });
  } else {
    response = await service.CreateArtifact(ctx, request!);
  }

  return Buffer.from(CreateArtifactResponse.toBinary(response));
}

async function handleArtifactServiceFinalizeArtifactProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
) {
  let request: FinalizeArtifactRequest;
  let response: FinalizeArtifactResponse;

  try {
    request = FinalizeArtifactRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      FinalizeArtifactRequest,
      FinalizeArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.FinalizeArtifact(ctx, inputReq);
    });
  } else {
    response = await service.FinalizeArtifact(ctx, request!);
  }

  return Buffer.from(FinalizeArtifactResponse.toBinary(response));
}

async function handleArtifactServiceListArtifactsProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
) {
  let request: ListArtifactsRequest;
  let response: ListArtifactsResponse;

  try {
    request = ListArtifactsRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      ListArtifactsRequest,
      ListArtifactsResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.ListArtifacts(ctx, inputReq);
    });
  } else {
    response = await service.ListArtifacts(ctx, request!);
  }

  return Buffer.from(ListArtifactsResponse.toBinary(response));
}

async function handleArtifactServiceGetSignedArtifactURLProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    GetSignedArtifactURLRequest,
    GetSignedArtifactURLResponse
  >[]
) {
  let request: GetSignedArtifactURLRequest;
  let response: GetSignedArtifactURLResponse;

  try {
    request = GetSignedArtifactURLRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      GetSignedArtifactURLRequest,
      GetSignedArtifactURLResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.GetSignedArtifactURL(ctx, inputReq);
    });
  } else {
    response = await service.GetSignedArtifactURL(ctx, request!);
  }

  return Buffer.from(GetSignedArtifactURLResponse.toBinary(response));
}

async function handleArtifactServiceDeleteArtifactProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
) {
  let request: DeleteArtifactRequest;
  let response: DeleteArtifactResponse;

  try {
    request = DeleteArtifactRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      DeleteArtifactRequest,
      DeleteArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.DeleteArtifact(ctx, inputReq);
    });
  } else {
    response = await service.DeleteArtifact(ctx, request!);
  }

  return Buffer.from(DeleteArtifactResponse.toBinary(response));
}
@@ -1,51 +0,0 @@
/**
 * Mocks default limits for easier testing
 */
export function getUploadFileConcurrency(): number {
  return 1
}

export function getUploadChunkConcurrency(): number {
  return 1
}

export function getUploadChunkSize(): number {
  return 4 * 1024 * 1024 // 4 MB Chunks
}

export function getRetryLimit(): number {
  return 2
}

export function getRetryMultiplier(): number {
  return 1.5
}

export function getInitialRetryIntervalInMilliseconds(): number {
  return 10
}

export function getDownloadFileConcurrency(): number {
  return 1
}

/**
 * Mocks the 'ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL' and 'GITHUB_RUN_ID' env variables
 * that are only available from a node context on the runner. This allows for tests to run
 * locally without the env variables actually being set
 */
export function getRuntimeToken(): string {
  return 'totally-valid-token'
}

export function getRuntimeUrl(): string {
  return 'https://www.example.com/'
}

export function getWorkFlowRunId(): string {
  return '15'
}

export function getRetentionDays(): string | undefined {
  return '45'
}
@@ -1,254 +0,0 @@
import * as core from '@actions/core'
import {
  UploadSpecification,
  getUploadSpecification
} from './upload-specification'
import {UploadHttpClient} from './upload-http-client'
import {UploadResponse} from './upload-response'
import {UploadOptions} from './upload-options'
import {DownloadOptions} from './download-options'
import {DownloadResponse} from './download-response'
import {
  checkArtifactName,
  createDirectoriesForArtifact,
  createEmptyFilesForArtifact
} from './utils'
import {DownloadHttpClient} from './download-http-client'
import {getDownloadSpecification} from './download-specification'
import {getWorkSpaceDirectory} from './config-variables'
import {normalize, resolve} from 'path'

export interface ArtifactClient {
  /**
   * Uploads an artifact
   *
   * @param name the name of the artifact, required
   * @param files a list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory an absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options extra options for customizing the upload behavior
   * @returns single UploadInfo object
   */
  uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadOptions
  ): Promise<UploadResponse>

  /**
   * Downloads a single artifact associated with a run
   *
   * @param name the name of the artifact being downloaded
   * @param path optional path that denotes where the artifact will be downloaded to
   * @param options extra options that allow for the customization of the download behavior
   */
  downloadArtifact(
    name: string,
    path?: string,
    options?: DownloadOptions
  ): Promise<DownloadResponse>

  /**
   * Downloads all artifacts associated with a run. Because there are multiple artifacts being downloaded, a folder will be created for each one in the specified or default directory
   * @param path optional path that denotes where the artifacts will be downloaded to
   */
  downloadAllArtifacts(path?: string): Promise<DownloadResponse[]>
}

export class DefaultArtifactClient implements ArtifactClient {
  /**
   * Constructs a DefaultArtifactClient
   */
  static create(): DefaultArtifactClient {
    return new DefaultArtifactClient()
  }

  /**
   * Uploads an artifact
   */
  async uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadOptions | undefined
  ): Promise<UploadResponse> {
    checkArtifactName(name)

    // Get specification for the files being uploaded
    const uploadSpecification: UploadSpecification[] = getUploadSpecification(
      name,
      rootDirectory,
      files
    )
    const uploadResponse: UploadResponse = {
      artifactName: name,
      artifactItems: [],
      size: 0,
      failedItems: []
    }

    const uploadHttpClient = new UploadHttpClient()

    if (uploadSpecification.length === 0) {
      core.warning(`No files found that can be uploaded`)
    } else {
      // Create an entry for the artifact in the file container
      const response = await uploadHttpClient.createArtifactInFileContainer(
        name,
        options
      )
      if (!response.fileContainerResourceUrl) {
        core.debug(response.toString())
        throw new Error(
          'No URL provided by the Artifact Service to upload an artifact to'
        )
      }
      core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)

      // Upload each of the files that were found concurrently
      const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
        response.fileContainerResourceUrl,
        uploadSpecification,
        options
      )

      // Update the size of the artifact to indicate we are done uploading
      // The uncompressed size is used for display when downloading a zip of the artifact from the UI
      await uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name)

      core.info(
        `Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
      )

      uploadResponse.artifactItems = uploadSpecification.map(
        item => item.absoluteFilePath
      )
      uploadResponse.size = uploadResult.uploadSize
      uploadResponse.failedItems = uploadResult.failedItems
    }
    return uploadResponse
  }

  async downloadArtifact(
    name: string,
    path?: string | undefined,
    options?: DownloadOptions | undefined
  ): Promise<DownloadResponse> {
    const downloadHttpClient = new DownloadHttpClient()

    const artifacts = await downloadHttpClient.listArtifacts()
    if (artifacts.count === 0) {
      throw new Error(
        `Unable to find any artifacts for the associated workflow`
      )
    }

    const artifactToDownload = artifacts.value.find(artifact => {
      return artifact.name === name
    })
    if (!artifactToDownload) {
      throw new Error(`Unable to find an artifact with the name: ${name}`)
    }

    const items = await downloadHttpClient.getContainerItems(
      artifactToDownload.name,
      artifactToDownload.fileContainerResourceUrl
    )

    if (!path) {
      path = getWorkSpaceDirectory()
    }
    path = normalize(path)
    path = resolve(path)

    // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
    const downloadSpecification = getDownloadSpecification(
      name,
      items.value,
      path,
      options?.createArtifactFolder || false
    )

    if (downloadSpecification.filesToDownload.length === 0) {
      core.info(
        `No downloadable files were found for the artifact: ${artifactToDownload.name}`
      )
    } else {
      // Create all necessary directories recursively before starting any download
      await createDirectoriesForArtifact(
        downloadSpecification.directoryStructure
      )
      core.info('Directory structure has been setup for the artifact')
      await createEmptyFilesForArtifact(
        downloadSpecification.emptyFilesToCreate
      )
      await downloadHttpClient.downloadSingleArtifact(
        downloadSpecification.filesToDownload
      )
    }

    return {
      artifactName: name,
      downloadPath: downloadSpecification.rootDownloadLocation
|
||||
}
|
||||
}
|
||||
|
||||
async downloadAllArtifacts(
|
||||
path?: string | undefined
|
||||
): Promise<DownloadResponse[]> {
|
||||
const downloadHttpClient = new DownloadHttpClient()
|
||||
|
||||
const response: DownloadResponse[] = []
|
||||
const artifacts = await downloadHttpClient.listArtifacts()
|
||||
if (artifacts.count === 0) {
|
||||
core.info('Unable to find any artifacts for the associated workflow')
|
||||
return response
|
||||
}
|
||||
|
||||
if (!path) {
|
||||
path = getWorkSpaceDirectory()
|
||||
}
|
||||
path = normalize(path)
|
||||
path = resolve(path)
|
||||
|
||||
let downloadedArtifacts = 0
|
||||
while (downloadedArtifacts < artifacts.count) {
|
||||
const currentArtifactToDownload = artifacts.value[downloadedArtifacts]
|
||||
downloadedArtifacts += 1
|
||||
|
||||
// Get container entries for the specific artifact
|
||||
const items = await downloadHttpClient.getContainerItems(
|
||||
currentArtifactToDownload.name,
|
||||
currentArtifactToDownload.fileContainerResourceUrl
|
||||
)
|
||||
|
||||
const downloadSpecification = getDownloadSpecification(
|
||||
currentArtifactToDownload.name,
|
||||
items.value,
|
||||
path,
|
||||
true
|
||||
)
|
||||
if (downloadSpecification.filesToDownload.length === 0) {
|
||||
core.info(
|
||||
`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`
|
||||
)
|
||||
} else {
|
||||
await createDirectoriesForArtifact(
|
||||
downloadSpecification.directoryStructure
|
||||
)
|
||||
await createEmptyFilesForArtifact(
|
||||
downloadSpecification.emptyFilesToCreate
|
||||
)
|
||||
await downloadHttpClient.downloadSingleArtifact(
|
||||
downloadSpecification.filesToDownload
|
||||
)
|
||||
}
|
||||
|
||||
response.push({
|
||||
artifactName: currentArtifactToDownload.name,
|
||||
downloadPath: downloadSpecification.rootDownloadLocation
|
||||
})
|
||||
}
|
||||
return response
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,284 @@
import {warning} from '@actions/core'
import {isGhes} from './shared/config'
import {
  UploadArtifactOptions,
  UploadArtifactResponse,
  DownloadArtifactOptions,
  GetArtifactResponse,
  ListArtifactsOptions,
  ListArtifactsResponse,
  DownloadArtifactResponse,
  FindOptions,
  DeleteArtifactResponse
} from './shared/interfaces'
import {uploadArtifact} from './upload/upload-artifact'
import {
  downloadArtifactPublic,
  downloadArtifactInternal
} from './download/download-artifact'
import {
  deleteArtifactPublic,
  deleteArtifactInternal
} from './delete/delete-artifact'
import {getArtifactPublic, getArtifactInternal} from './find/get-artifact'
import {listArtifactsPublic, listArtifactsInternal} from './find/list-artifacts'
import {GHESNotSupportedError} from './shared/errors'

/**
 * Generic interface for the artifact client.
 */
export interface ArtifactClient {
  /**
   * Uploads an artifact.
   *
   * @param name The name of the artifact, required
   * @param files A list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options Extra options for customizing the upload behavior
   * @returns single UploadArtifactResponse object
   */
  uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadArtifactOptions
  ): Promise<UploadArtifactResponse>

  /**
   * Lists all artifacts that are part of the current workflow run.
   * This function will return at most 1000 artifacts per workflow run.
   *
   * If `options.findBy` is specified, this will call the public List Artifacts API, which can list from other runs.
   * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
   *
   * @param options Extra options that allow for the customization of the list behavior
   * @returns ListArtifactResponse object
   */
  listArtifacts(
    options?: ListArtifactsOptions & FindOptions
  ): Promise<ListArtifactsResponse>

  /**
   * Finds an artifact by name.
   * If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
   * If the artifact is not found, it will throw.
   *
   * If `options.findBy` is specified, this will use the public List Artifacts API with a name filter, which can get artifacts from other runs.
   * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
   * `@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run;
   * duplicates can only occur with old versions of upload-artifact (v1, v2 and v3), with `@actions/artifact` < v2, or on a rerun.
   * In that case, this function returns the latest artifact that matches the name.
   *
   * @param artifactName The name of the artifact to find
   * @param options Extra options that allow for the customization of the get behavior
   */
  getArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<GetArtifactResponse>

  /**
   * Downloads an artifact and unzips the content.
   *
   * If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
   *
   * @param artifactId The id of the artifact to download
   * @param options Extra options that allow for the customization of the download behavior
   * @returns single DownloadArtifactResponse object
   */
  downloadArtifact(
    artifactId: number,
    options?: DownloadArtifactOptions & FindOptions
  ): Promise<DownloadArtifactResponse>

  /**
   * Deletes an artifact.
   *
   * If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
   *
   * @param artifactName The name of the artifact to delete
   * @param options Extra options that allow for the customization of the delete behavior
   * @returns single DeleteArtifactResponse object
   */
  deleteArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<DeleteArtifactResponse>
}

/**
 * The default artifact client that is used by the artifact action(s).
 */
export class DefaultArtifactClient implements ArtifactClient {
  async uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadArtifactOptions
  ): Promise<UploadArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      return uploadArtifact(name, files, rootDirectory, options)
    } catch (error) {
      warning(
        `Artifact upload failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async downloadArtifact(
    artifactId: number,
    options?: DownloadArtifactOptions & FindOptions
  ): Promise<DownloadArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {repositoryOwner, repositoryName, token},
          ...downloadOptions
        } = options

        return downloadArtifactPublic(
          artifactId,
          repositoryOwner,
          repositoryName,
          token,
          downloadOptions
        )
      }

      return downloadArtifactInternal(artifactId, options)
    } catch (error) {
      warning(
        `Download Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async listArtifacts(
    options?: ListArtifactsOptions & FindOptions
  ): Promise<ListArtifactsResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {workflowRunId, repositoryOwner, repositoryName, token}
        } = options

        return listArtifactsPublic(
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token,
          options?.latest
        )
      }

      return listArtifactsInternal(options?.latest)
    } catch (error: unknown) {
      warning(
        `Listing Artifacts failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async getArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<GetArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {workflowRunId, repositoryOwner, repositoryName, token}
        } = options

        return getArtifactPublic(
          artifactName,
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token
        )
      }

      return getArtifactInternal(artifactName)
    } catch (error: unknown) {
      warning(
        `Get Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )
      throw error
    }
  }

  async deleteArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<DeleteArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {repositoryOwner, repositoryName, workflowRunId, token}
        } = options

        return deleteArtifactPublic(
          artifactName,
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token
        )
      }

      return deleteArtifactInternal(artifactName)
    } catch (error) {
      warning(
        `Delete Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }
}
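A minimal usage sketch of the client above, assuming it runs inside a GitHub Actions job where the Actions runtime environment is available; the artifact name, file list, and the response fields used (`id`, `size`, `downloadPath`) are illustrative assumptions based on the interfaces imported above.

import {DefaultArtifactClient} from '@actions/artifact'

async function example(): Promise<void> {
  const client = new DefaultArtifactClient()

  // Upload two files rooted at the current working directory.
  const {id, size} = await client.uploadArtifact(
    'my-artifact', // hypothetical artifact name
    ['results.json', 'logs/run.log'], // illustrative file list
    process.cwd()
  )
  console.log(`Uploaded artifact ${id} (${size} bytes)`)

  // Download it back into the default workspace directory.
  if (id !== undefined) {
    const {downloadPath} = await client.downloadArtifact(id)
    console.log(`Downloaded to ${downloadPath}`)
  }
}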
@@ -1,67 +0,0 @@
// The number of concurrent uploads that happen at the same time
export function getUploadFileConcurrency(): number {
  return 2
}

// When uploading large files that can't be uploaded with a single http call, this controls
// the chunk size that is used during upload
export function getUploadChunkSize(): number {
  return 8 * 1024 * 1024 // 8 MB chunks
}

// The maximum number of retries that can be attempted before an upload or download fails
export function getRetryLimit(): number {
  return 5
}

// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
// The retry multiplier controls by how much the backoff time increases depending on the number of retries
export function getRetryMultiplier(): number {
  return 1.5
}

// The initial wait time if an upload or download fails and a retry is being attempted for the first time
export function getInitialRetryIntervalInMilliseconds(): number {
  return 3000
}

// The number of concurrent downloads that happen at the same time
export function getDownloadFileConcurrency(): number {
  return 2
}

export function getRuntimeToken(): string {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable')
  }
  return token
}

export function getRuntimeUrl(): string {
  const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']
  if (!runtimeUrl) {
    throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable')
  }
  return runtimeUrl
}

export function getWorkFlowRunId(): string {
  const workFlowRunId = process.env['GITHUB_RUN_ID']
  if (!workFlowRunId) {
    throw new Error('Unable to get GITHUB_RUN_ID env variable')
  }
  return workFlowRunId
}

export function getWorkSpaceDirectory(): string {
  const workspaceDirectory = process.env['GITHUB_WORKSPACE']
  if (!workspaceDirectory) {
    throw new Error('Unable to get GITHUB_WORKSPACE env variable')
  }
  return workspaceDirectory
}

export function getRetentionDays(): string | undefined {
  return process.env['GITHUB_RETENTION_DAYS']
}
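A quick sketch of the retry schedule the constants above imply, assuming the common form wait(n) = initialInterval * multiplier^(n - 1) for retry n; the actual computation lives in getExponentialRetryTimeInMilliseconds and may differ (for example by adding jitter).

const initialIntervalMs = 3000 // getInitialRetryIntervalInMilliseconds()
const multiplier = 1.5 // getRetryMultiplier()
const retryLimit = 5 // getRetryLimit()

for (let retry = 1; retry <= retryLimit; retry++) {
  // Assumed formula, not the library's exact implementation.
  const waitMs = initialIntervalMs * Math.pow(multiplier, retry - 1)
  console.log(`retry #${retry}: wait ~${Math.round(waitMs)} ms`)
}
// retry #1 ~3000 ms, #2 ~4500 ms, #3 ~6750 ms, #4 ~10125 ms, #5 ~15188 ms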
@@ -1,64 +0,0 @@
export interface ArtifactResponse {
  containerId: string
  size: number
  signedContent: string
  fileContainerResourceUrl: string
  type: string
  name: string
  url: string
}

export interface CreateArtifactParameters {
  Type: string
  Name: string
  RetentionDays?: number
}

export interface PatchArtifactSize {
  Size: number
}

export interface PatchArtifactSizeSuccessResponse {
  containerId: number
  size: number
  signedContent: string
  type: string
  name: string
  url: string
  uploadUrl: string
}

export interface UploadResults {
  uploadSize: number
  totalSize: number
  failedItems: string[]
}

export interface ListArtifactsResponse {
  count: number
  value: ArtifactResponse[]
}

export interface QueryArtifactResponse {
  count: number
  value: ContainerEntry[]
}

export interface ContainerEntry {
  containerId: number
  scopeIdentifier: string
  path: string
  itemType: string
  status: string
  fileLength?: number
  fileEncoding?: number
  fileType?: number
  dateCreated: string
  dateLastModified: string
  createdBy: string
  lastModifiedBy: string
  itemLocation: string
  contentLocation: string
  fileId?: number
  contentId: string
}
@@ -0,0 +1,109 @@
import {info, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {DeleteArtifactResponse} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from '../find/retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {
  DeleteArtifactRequest,
  ListArtifactsRequest,
  StringValue
} from '../../generated'
import {getArtifactPublic} from '../find/get-artifact'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function deleteArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string
): Promise<DeleteArtifactResponse> {
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  const getArtifactResp = await getArtifactPublic(
    artifactName,
    workflowRunId,
    repositoryOwner,
    repositoryName,
    token
  )

  const deleteArtifactResp = await github.rest.actions.deleteArtifact({
    owner: repositoryOwner,
    repo: repositoryName,
    artifact_id: getArtifactResp.artifact.id
  })

  if (deleteArtifactResp.status !== 204) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${deleteArtifactResp.status} (${deleteArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  return {
    id: getArtifactResp.artifact.id
  }
}

export async function deleteArtifactInternal(
  artifactName: string
): Promise<DeleteArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const listRes = await artifactClient.ListArtifacts(listReq)

  if (listRes.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}`
    )
  }

  let artifact = listRes.artifacts[0]
  if (listRes.artifacts.length > 1) {
    artifact = listRes.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  const req: DeleteArtifactRequest = {
    workflowRunBackendId: artifact.workflowRunBackendId,
    workflowJobRunBackendId: artifact.workflowJobRunBackendId,
    name: artifact.name
  }

  const res = await artifactClient.DeleteArtifact(req)
  info(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`)

  return {
    id: Number(res.artifactId)
  }
}
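A hedged sketch of calling the public delete path from outside the owning run; the owner, repository, run id, and token below are placeholders.

import {deleteArtifactPublic} from './delete/delete-artifact'

async function cleanUp(): Promise<void> {
  const {id} = await deleteArtifactPublic(
    'my-artifact', // hypothetical artifact name
    12345678, // hypothetical workflow run id
    'octocat', // placeholder repository owner
    'hello-world', // placeholder repository name
    process.env.GITHUB_TOKEN ?? '' // token with access to the target repo
  )
  console.log(`Deleted artifact ${id}`)
}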
@@ -1,358 +0,0 @@
import * as fs from 'fs'
import * as core from '@actions/core'
import * as zlib from 'zlib'
import {
  getArtifactUrl,
  getDownloadHeaders,
  isSuccessStatusCode,
  isRetryableStatusCode,
  isThrottledStatusCode,
  getExponentialRetryTimeInMilliseconds,
  tryGetRetryAfterValueTimeInMilliseconds,
  displayHttpDiagnostics,
  getFileSize,
  rmFile,
  sleep
} from './utils'
import {URL} from 'url'
import {StatusReporter} from './status-reporter'
import {performance} from 'perf_hooks'
import {ListArtifactsResponse, QueryArtifactResponse} from './contracts'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {HttpManager} from './http-manager'
import {DownloadItem} from './download-specification'
import {getDownloadFileConcurrency, getRetryLimit} from './config-variables'
import {IncomingHttpHeaders} from 'http'
import {retryHttpClientRequest} from './requestUtils'

export class DownloadHttpClient {
  // http manager is used for concurrent connections when downloading multiple files at once
  private downloadHttpManager: HttpManager
  private statusReporter: StatusReporter

  constructor() {
    this.downloadHttpManager = new HttpManager(
      getDownloadFileConcurrency(),
      '@actions/artifact-download'
    )
    // downloads are usually significantly faster than uploads so display status information every second
    this.statusReporter = new StatusReporter(1000)
  }

  /**
   * Gets a list of all artifacts that are in a specific container
   */
  async listArtifacts(): Promise<ListArtifactsResponse> {
    const artifactUrl = getArtifactUrl()

    // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
    const client = this.downloadHttpManager.getClient(0)
    const headers = getDownloadHeaders('application/json')
    const response = await retryHttpClientRequest('List Artifacts', async () =>
      client.get(artifactUrl, headers)
    )
    const body: string = await response.readBody()
    return JSON.parse(body)
  }

  /**
   * Fetches a set of container items that describe the contents of an artifact
   * @param artifactName the name of the artifact
   * @param containerUrl the artifact container URL for the run
   */
  async getContainerItems(
    artifactName: string,
    containerUrl: string
  ): Promise<QueryArtifactResponse> {
    // the itemPath search parameter controls which containers will be returned
    const resourceUrl = new URL(containerUrl)
    resourceUrl.searchParams.append('itemPath', artifactName)

    // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
    const client = this.downloadHttpManager.getClient(0)
    const headers = getDownloadHeaders('application/json')
    const response = await retryHttpClientRequest(
      'Get Container Items',
      async () => client.get(resourceUrl.toString(), headers)
    )
    const body: string = await response.readBody()
    return JSON.parse(body)
  }

  /**
   * Concurrently downloads all the files that are part of an artifact
   * @param downloadItems information about what items to download and where to save them
   */
  async downloadSingleArtifact(downloadItems: DownloadItem[]): Promise<void> {
    const DOWNLOAD_CONCURRENCY = getDownloadFileConcurrency()
    // limit the number of files downloaded at a single time
    core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`)
    const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]
    let currentFile = 0
    let downloadedFiles = 0

    core.info(
      `Total number of files that will be downloaded: ${downloadItems.length}`
    )

    this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length)
    this.statusReporter.start()

    await Promise.all(
      parallelDownloads.map(async index => {
        while (currentFile < downloadItems.length) {
          const currentFileToDownload = downloadItems[currentFile]
          currentFile += 1

          const startTime = performance.now()
          await this.downloadIndividualFile(
            index,
            currentFileToDownload.sourceLocation,
            currentFileToDownload.targetPath
          )

          if (core.isDebug()) {
            core.debug(
              `File: ${++downloadedFiles}/${downloadItems.length}. ${
                currentFileToDownload.targetPath
              } took ${(performance.now() - startTime).toFixed(
                3
              )} milliseconds to finish downloading`
            )
          }

          this.statusReporter.incrementProcessedCount()
        }
      })
    )
      .catch(error => {
        throw new Error(`Unable to download the artifact: ${error}`)
      })
      .finally(() => {
        this.statusReporter.stop()
        // safety dispose all connections
        this.downloadHttpManager.disposeAndReplaceAllClients()
      })
  }

  /**
   * Downloads an individual file
   * @param httpClientIndex the index of the http client that is used to make all of the calls
   * @param artifactLocation origin location where a file will be downloaded from
   * @param downloadPath destination location for the file being downloaded
   */
  private async downloadIndividualFile(
    httpClientIndex: number,
    artifactLocation: string,
    downloadPath: string
  ): Promise<void> {
    let retryCount = 0
    const retryLimit = getRetryLimit()
    let destinationStream = fs.createWriteStream(downloadPath)
    const headers = getDownloadHeaders('application/json', true, true)

    // a single GET request is used to download a file
    const makeDownloadRequest = async (): Promise<IHttpClientResponse> => {
      const client = this.downloadHttpManager.getClient(httpClientIndex)
      return await client.get(artifactLocation, headers)
    }

    // check the response headers to determine if the file was compressed using gzip
    const isGzip = (incomingHeaders: IncomingHttpHeaders): boolean => {
      return (
        'content-encoding' in incomingHeaders &&
        incomingHeaders['content-encoding'] === 'gzip'
      )
    }

    // Increments the current retry count and then checks if the retry limit has been reached
    // If there have been too many retries, fail so the download stops. If there is a retryAfter value provided,
    // it will be used
    const backOff = async (retryAfterValue?: number): Promise<void> => {
      retryCount++
      if (retryCount > retryLimit) {
        return Promise.reject(
          new Error(
            `Retry limit has been reached. Unable to download ${artifactLocation}`
          )
        )
      } else {
        this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex)
        if (retryAfterValue) {
          // Back off by waiting the specified time denoted by the retry-after header
          core.info(
            `Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`
          )
          await sleep(retryAfterValue)
        } else {
          // Back off using an exponential value that depends on the retry count
          const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount)
          core.info(
            `Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`
          )
          await sleep(backoffTime)
        }
        core.info(
          `Finished backoff for retry #${retryCount}, continuing with download`
        )
      }
    }

    const isAllBytesReceived = (
      expected?: string,
      received?: number
    ): boolean => {
      // be lenient, if any input is missing, assume success, i.e. not truncated
      if (
        !expected ||
        !received ||
        process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']
      ) {
        core.info('Skipping download validation.')
        return true
      }

      return parseInt(expected) === received
    }

    const resetDestinationStream = async (
      fileDownloadPath: string
    ): Promise<void> => {
      destinationStream.close()
      await rmFile(fileDownloadPath)
      destinationStream = fs.createWriteStream(fileDownloadPath)
    }

    // keep trying to download a file until a retry limit has been reached
    while (retryCount <= retryLimit) {
      let response: IHttpClientResponse
      try {
        response = await makeDownloadRequest()
        if (core.isDebug()) {
          displayHttpDiagnostics(response)
        }
      } catch (error) {
        // if an error is caught, it is usually indicative of a timeout so retry the download
        core.info('An error occurred while attempting to download a file')
        // eslint-disable-next-line no-console
        console.log(error)

        // increment the retryCount and use exponential backoff to wait before making the next request
        await backOff()
        continue
      }

      let forceRetry = false
      if (isSuccessStatusCode(response.message.statusCode)) {
        // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string
        // which can cause some gzip encoded data to be lost
        // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
        try {
          const isGzipped = isGzip(response.message.headers)
          await this.pipeResponseToFile(response, destinationStream, isGzipped)

          if (
            isGzipped ||
            isAllBytesReceived(
              response.message.headers['content-length'],
              await getFileSize(downloadPath)
            )
          ) {
            return
          } else {
            forceRetry = true
          }
        } catch (error) {
          // retry on error, most likely streams were corrupted
          forceRetry = true
        }
      }

      if (forceRetry || isRetryableStatusCode(response.message.statusCode)) {
        core.info(
          `A ${response.message.statusCode} response code has been received while attempting to download an artifact`
        )
        // reset the stream and remove the partial file so the retry rewrites it from scratch
        // (awaited so the cleanup finishes before the next attempt starts)
        await resetDestinationStream(downloadPath)
        // if a throttled status code is received, try to get the retryAfter header value, else defer to standard exponential backoff
        isThrottledStatusCode(response.message.statusCode)
          ? await backOff(
              tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)
            )
          : await backOff()
      } else {
        // Some unexpected response code, fail immediately and stop the download
        displayHttpDiagnostics(response)
        return Promise.reject(
          new Error(
            `Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`
          )
        )
      }
    }
  }

  /**
   * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
   * @param response the http response received when downloading a file
   * @param destinationStream the stream where the file should be written to
   * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
   */
  async pipeResponseToFile(
    response: IHttpClientResponse,
    destinationStream: fs.WriteStream,
    isGzip: boolean
  ): Promise<void> {
    await new Promise<void>((resolve, reject) => {
      if (isGzip) {
        const gunzip = zlib.createGunzip()
        response.message
          .on('error', error => {
            core.error(
              `An error occurred while attempting to read the response stream`
            )
            gunzip.close()
            destinationStream.close()
            reject(error)
          })
          .pipe(gunzip)
          .on('error', error => {
            core.error(
              `An error occurred while attempting to decompress the response stream`
            )
            destinationStream.close()
            reject(error)
          })
          .pipe(destinationStream)
          .on('close', () => {
            resolve()
          })
          .on('error', error => {
            core.error(
              `An error occurred while writing a downloaded file to ${destinationStream.path}`
            )
            reject(error)
          })
      } else {
        response.message
          .on('error', error => {
            core.error(
              `An error occurred while attempting to read the response stream`
            )
            destinationStream.close()
            reject(error)
          })
          .pipe(destinationStream)
          .on('close', () => {
            resolve()
          })
          .on('error', error => {
            core.error(
              `An error occurred while writing a downloaded file to ${destinationStream.path}`
            )
            reject(error)
          })
      }
    })
    return
  }
}
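For reference, a standalone sketch of the gzip check used above; gzip-encoded responses skip the content-length comparison because the decompressed size on disk will not match the transferred size.

import {IncomingHttpHeaders} from 'http'

const isGzipEncoded = (headers: IncomingHttpHeaders): boolean =>
  'content-encoding' in headers && headers['content-encoding'] === 'gzip'

console.log(isGzipEncoded({'content-encoding': 'gzip'})) // true
console.log(isGzipEncoded({'content-length': '123'})) // false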
@@ -1,7 +0,0 @@
export interface DownloadOptions {
  /**
   * Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder),
   * defaults to false if not specified
   */
  createArtifactFolder?: boolean
}
@@ -1,11 +0,0 @@
export interface DownloadResponse {
  /**
   * The name of the artifact that was downloaded
   */
  artifactName: string

  /**
   * The full path to where the artifact was downloaded
   */
  downloadPath: string
}
@@ -1,87 +0,0 @@
import * as path from 'path'
import {ContainerEntry} from './contracts'

export interface DownloadSpecification {
  // root download location for the artifact
  rootDownloadLocation: string

  // directories that need to be created for all the items in the artifact
  directoryStructure: string[]

  // empty files that are part of the artifact that don't require any downloading
  emptyFilesToCreate: string[]

  // individual files that need to be downloaded as part of the artifact
  filesToDownload: DownloadItem[]
}

export interface DownloadItem {
  // URL that denotes where to download the item from
  sourceLocation: string

  // Information about where the file should be downloaded to
  targetPath: string
}

/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe the files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
export function getDownloadSpecification(
  artifactName: string,
  artifactEntries: ContainerEntry[],
  downloadPath: string,
  includeRootDirectory: boolean
): DownloadSpecification {
  // use a set for the directory paths so that there are no duplicates
  const directories = new Set<string>()

  const specifications: DownloadSpecification = {
    rootDownloadLocation: includeRootDirectory
      ? path.join(downloadPath, artifactName)
      : downloadPath,
    directoryStructure: [],
    emptyFilesToCreate: [],
    filesToDownload: []
  }

  for (const entry of artifactEntries) {
    // Ignore artifacts in the container that don't begin with the same name
    if (
      entry.path.startsWith(`${artifactName}/`) ||
      entry.path.startsWith(`${artifactName}\\`)
    ) {
      // normalize all separators to the local OS
      const normalizedPathEntry = path.normalize(entry.path)
      // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path
      const filePath = path.join(
        downloadPath,
        includeRootDirectory
          ? normalizedPathEntry
          : normalizedPathEntry.replace(artifactName, '')
      )

      // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder'
      // itemType cannot be relied upon. The file must be used to determine the directory structure
      if (entry.itemType === 'file') {
        // Get the directories that we need to create from the filePath for each individual file
        directories.add(path.dirname(filePath))
        if (entry.fileLength === 0) {
          // An empty file was uploaded, create the empty files locally so that no extra http calls are made
          specifications.emptyFilesToCreate.push(filePath)
        } else {
          specifications.filesToDownload.push({
            sourceLocation: entry.contentLocation,
            targetPath: filePath
          })
        }
      }
    }
  }

  specifications.directoryStructure = Array.from(directories)
  return specifications
}
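A worked example of what getDownloadSpecification produces for a two-entry artifact, assuming POSIX paths; the container entries below are fabricated and carry only the fields the function actually reads.

import {getDownloadSpecification} from './download-specification'
import {ContainerEntry} from './contracts'

// Fabricated entries for an artifact named 'my-artifact'.
const entries = [
  {path: 'my-artifact/empty.txt', itemType: 'file', fileLength: 0},
  {
    path: 'my-artifact/data/out.json',
    itemType: 'file',
    fileLength: 42,
    contentLocation: 'https://example.test/out'
  }
] as unknown as ContainerEntry[]

const spec = getDownloadSpecification('my-artifact', entries, '/work', true)
// spec.rootDownloadLocation === '/work/my-artifact'
// spec.directoryStructure   === ['/work/my-artifact', '/work/my-artifact/data']
// spec.emptyFilesToCreate   === ['/work/my-artifact/empty.txt']
// spec.filesToDownload      === [{sourceLocation: 'https://example.test/out',
//                                 targetPath: '/work/my-artifact/data/out.json'}]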
@@ -0,0 +1,265 @@
import fs from 'fs/promises'
import * as stream from 'stream'
import {createWriteStream} from 'fs'
import * as path from 'path'
import * as github from '@actions/github'
import * as core from '@actions/core'
import * as httpClient from '@actions/http-client'
import unzip from 'unzip-stream'
import {
  DownloadArtifactOptions,
  DownloadArtifactResponse
} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getGitHubWorkspaceDir} from '../shared/config'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {
  GetSignedArtifactURLRequest,
  Int64Value,
  ListArtifactsRequest
} from '../../generated'
import {getBackendIdsFromToken} from '../shared/util'
import {ArtifactNotFoundError} from '../shared/errors'

const scrubQueryParameters = (url: string): string => {
  const parsed = new URL(url)
  parsed.search = ''
  return parsed.toString()
}

async function exists(path: string): Promise<boolean> {
  try {
    await fs.access(path)
    return true
  } catch (error) {
    if (error.code === 'ENOENT') {
      return false
    } else {
      throw error
    }
  }
}

async function streamExtract(url: string, directory: string): Promise<void> {
  let retryCount = 0
  while (retryCount < 5) {
    try {
      await streamExtractExternal(url, directory)
      return
    } catch (error) {
      if (error.message.includes('Malformed extraction path')) {
        throw new Error(
          `Artifact download failed with unretryable error: ${error.message}`
        )
      }
      retryCount++
      core.debug(
        `Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`
      )
      // wait 5 seconds before retrying
      await new Promise(resolve => setTimeout(resolve, 5000))
    }
  }

  throw new Error(`Artifact download failed after ${retryCount} retries.`)
}

export async function streamExtractExternal(
  url: string,
  directory: string
): Promise<void> {
  const client = new httpClient.HttpClient(getUserAgentString())
  const response = await client.get(url)
  if (response.message.statusCode !== 200) {
    throw new Error(
      `Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`
    )
  }

  const timeout = 30 * 1000 // 30 seconds

  return new Promise((resolve, reject) => {
    const timerFn = (): void => {
      response.message.destroy(
        new Error(`Blob storage chunk did not respond in ${timeout}ms`)
      )
    }
    const timer = setTimeout(timerFn, timeout)

    const createdDirectories = new Set<string>()
    createdDirectories.add(directory)
    response.message
      .on('data', () => {
        timer.refresh()
      })
      .on('error', (error: Error) => {
        core.debug(
          `response.message: Artifact download failed: ${error.message}`
        )
        clearTimeout(timer)
        reject(error)
      })
      .pipe(unzip.Parse())
      .pipe(
        new stream.Transform({
          objectMode: true,
          transform: async (entry, _, callback) => {
            const fullPath = path.normalize(path.join(directory, entry.path))
            if (!directory.endsWith(path.sep)) {
              directory += path.sep
            }
            if (!fullPath.startsWith(directory)) {
              reject(new Error(`Malformed extraction path: ${fullPath}`))
            }

            if (entry.type === 'Directory') {
              if (!createdDirectories.has(fullPath)) {
                createdDirectories.add(fullPath)
                await resolveOrCreateDirectory(fullPath).then(() => {
                  entry.autodrain()
                  callback()
                })
              } else {
                entry.autodrain()
                callback()
              }
            } else {
              core.info(`Extracting artifact entry: ${fullPath}`)
              if (!createdDirectories.has(path.dirname(fullPath))) {
                createdDirectories.add(path.dirname(fullPath))
                await resolveOrCreateDirectory(path.dirname(fullPath))
              }

              const writeStream = createWriteStream(fullPath)
              writeStream.on('finish', callback)
              writeStream.on('error', reject)
              entry.pipe(writeStream)
            }
          }
        })
      )
      .on('finish', async () => {
        clearTimeout(timer)
        resolve()
      })
      .on('error', (error: Error) => {
        reject(error)
      })
  })
}

export async function downloadArtifactPublic(
  artifactId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string,
  options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
  const downloadPath = await resolveOrCreateDirectory(options?.path)

  const api = github.getOctokit(token)

  core.info(
    `Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`
  )

  const {headers, status} = await api.rest.actions.downloadArtifact({
    owner: repositoryOwner,
    repo: repositoryName,
    artifact_id: artifactId,
    archive_format: 'zip',
    request: {
      redirect: 'manual'
    }
  })

  if (status !== 302) {
    throw new Error(`Unable to download artifact. Unexpected status: ${status}`)
  }

  const {location} = headers
  if (!location) {
    throw new Error(`Unable to redirect to artifact download url`)
  }

  core.info(
    `Redirecting to blob download url: ${scrubQueryParameters(location)}`
  )

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    await streamExtract(location, downloadPath)
    core.info(`Artifact download completed successfully.`)
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath}
}

export async function downloadArtifactInternal(
  artifactId: number,
  options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
  const downloadPath = await resolveOrCreateDirectory(options?.path)

  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    idFilter: Int64Value.create({value: artifactId.toString()})
  }

  const {artifacts} = await artifactClient.ListArtifacts(listReq)

  if (artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`
    )
  }

  if (artifacts.length > 1) {
    core.warning('Multiple artifacts found, defaulting to first.')
  }

  const signedReq: GetSignedArtifactURLRequest = {
    workflowRunBackendId: artifacts[0].workflowRunBackendId,
    workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
    name: artifacts[0].name
  }

  const {signedUrl} = await artifactClient.GetSignedArtifactURL(signedReq)

  core.info(
    `Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`
  )

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    await streamExtract(signedUrl, downloadPath)
    core.info(`Artifact download completed successfully.`)
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath}
}

async function resolveOrCreateDirectory(
  downloadPath = getGitHubWorkspaceDir()
): Promise<string> {
  if (!(await exists(downloadPath))) {
    core.debug(
      `Artifact destination folder does not exist, creating: ${downloadPath}`
    )
    await fs.mkdir(downloadPath, {recursive: true})
  } else {
    core.debug(`Artifact destination folder already exists: ${downloadPath}`)
  }

  return downloadPath
}
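An illustrative call to the public download path above; all identifiers below are placeholders.

import {downloadArtifactPublic} from './download/download-artifact'

async function fetchArtifact(): Promise<void> {
  const {downloadPath} = await downloadArtifactPublic(
    987654, // hypothetical artifact id
    'octocat', // placeholder repository owner
    'hello-world', // placeholder repository name
    process.env.GITHUB_TOKEN ?? '',
    {path: './downloads/my-artifact'} // optional extraction directory
  )
  console.log(`Extracted to ${downloadPath}`)
}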
@@ -0,0 +1,121 @@
import {getOctokit} from '@actions/github'
import {retry} from '@octokit/plugin-retry'
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {getRetryOptions} from './retry-options'
import {requestLog} from '@octokit/plugin-request-log'
import {GetArtifactResponse} from '../shared/interfaces'
import {getBackendIdsFromToken} from '../shared/util'
import {getUserAgentString} from '../shared/user-agent'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {ListArtifactsRequest, StringValue, Timestamp} from '../../generated'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function getArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string
): Promise<GetArtifactResponse> {
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  const getArtifactResp = await github.request(
    'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}',
    {
      owner: repositoryOwner,
      repo: repositoryName,
      run_id: workflowRunId,
      name: artifactName
    }
  )

  if (getArtifactResp.status !== 200) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${getArtifactResp.status} (${getArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  if (getArtifactResp.data.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
    )
  }

  let artifact = getArtifactResp.data.artifacts[0]
  if (getArtifactResp.data.artifacts.length > 1) {
    artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]
    core.debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.id})`
    )
  }

  return {
    artifact: {
      name: artifact.name,
      id: artifact.id,
      size: artifact.size_in_bytes,
      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
    }
  }
}

export async function getArtifactInternal(
  artifactName: string
): Promise<GetArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const req: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const res = await artifactClient.ListArtifacts(req)

  if (res.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
    )
  }

  let artifact = res.artifacts[0]
  if (res.artifacts.length > 1) {
    artifact = res.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    core.debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  return {
    artifact: {
      name: artifact.name,
      id: Number(artifact.databaseId),
      size: Number(artifact.size),
      createdAt: artifact.createdAt
        ? Timestamp.toDate(artifact.createdAt)
        : undefined
    }
  }
}
@ -0,0 +1,165 @@
import {info, warning, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {ListArtifactsResponse, Artifact} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from './retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {ListArtifactsRequest, Timestamp} from '../../generated'

// Limiting to 1000 for perf reasons
const maximumArtifactCount = 1000
const paginationCount = 100
const maxNumberOfPages = maximumArtifactCount / paginationCount

export async function listArtifactsPublic(
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string,
  latest = false
): Promise<ListArtifactsResponse> {
  info(
    `Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`
  )

  let artifacts: Artifact[] = []
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  let currentPageNumber = 1
  const {data: listArtifactResponse} =
    await github.rest.actions.listWorkflowRunArtifacts({
      owner: repositoryOwner,
      repo: repositoryName,
      run_id: workflowRunId,
      per_page: paginationCount,
      page: currentPageNumber
    })

  let numberOfPages = Math.ceil(
    listArtifactResponse.total_count / paginationCount
  )
  const totalArtifactCount = listArtifactResponse.total_count
  if (totalArtifactCount > maximumArtifactCount) {
    warning(
      `Workflow run ${workflowRunId} has more than ${maximumArtifactCount} artifacts. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
    )
    numberOfPages = maxNumberOfPages
  }

  // Iterate over the first page
  for (const artifact of listArtifactResponse.artifacts) {
    artifacts.push({
      name: artifact.name,
      id: artifact.id,
      size: artifact.size_in_bytes,
      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
    })
  }

  // Iterate over any remaining pages (page 1 was fetched above)
  for (
    currentPageNumber = 2;
    currentPageNumber <= numberOfPages;
    currentPageNumber++
  ) {
    debug(`Fetching page ${currentPageNumber} of artifact list`)

    const {data: listArtifactResponse} =
      await github.rest.actions.listWorkflowRunArtifacts({
        owner: repositoryOwner,
        repo: repositoryName,
        run_id: workflowRunId,
        per_page: paginationCount,
        page: currentPageNumber
      })

    for (const artifact of listArtifactResponse.artifacts) {
      artifacts.push({
        name: artifact.name,
        id: artifact.id,
        size: artifact.size_in_bytes,
        createdAt: artifact.created_at
          ? new Date(artifact.created_at)
          : undefined
      })
    }
  }

  if (latest) {
    artifacts = filterLatest(artifacts)
  }

  info(`Found ${artifacts.length} artifact(s)`)

  return {
    artifacts
  }
}

export async function listArtifactsInternal(
  latest = false
): Promise<ListArtifactsResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const req: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId
  }

  const res = await artifactClient.ListArtifacts(req)
  let artifacts: Artifact[] = res.artifacts.map(artifact => ({
    name: artifact.name,
    id: Number(artifact.databaseId),
    size: Number(artifact.size),
    createdAt: artifact.createdAt
      ? Timestamp.toDate(artifact.createdAt)
      : undefined
  }))

  if (latest) {
    artifacts = filterLatest(artifacts)
  }

  info(`Found ${artifacts.length} artifact(s)`)

  return {
    artifacts
  }
}

/**
 * Filters a list of artifacts to only include the latest artifact for each name
 * @param artifacts The artifacts to filter
 * @returns The filtered list of artifacts
 */
function filterLatest(artifacts: Artifact[]): Artifact[] {
  artifacts.sort((a, b) => b.id - a.id)
  const latestArtifacts: Artifact[] = []
  const seenArtifactNames = new Set<string>()
  for (const artifact of artifacts) {
    if (!seenArtifactNames.has(artifact.name)) {
      latestArtifacts.push(artifact)
      seenArtifactNames.add(artifact.name)
    }
  }
  return latestArtifacts
}
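To make filterLatest concrete, a small sketch (the names and IDs are made up):

const artifacts: Artifact[] = [
  {name: 'coverage', id: 11, size: 1024},
  {name: 'coverage', id: 42, size: 2048}, // a rerun re-uploaded the same name
  {name: 'logs', id: 12, size: 512}
]
// After sorting by id descending, the first occurrence of each name wins:
// filterLatest(artifacts) keeps {coverage, id: 42} and {logs, id: 12}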
@ -0,0 +1,48 @@
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {RequestRequestOptions} from '@octokit/types'

export type RetryOptions = {
  doNotRetry?: number[]
  enabled?: boolean
}

// Defaults for fetching artifacts
const defaultMaxRetryNumber = 5
const defaultExemptStatusCodes = [400, 401, 403, 404, 422] // https://github.com/octokit/plugin-retry.js/blob/9a2443746c350b3beedec35cf26e197ea318a261/src/index.ts#L14

export function getRetryOptions(
  defaultOptions: OctokitOptions,
  retries: number = defaultMaxRetryNumber,
  exemptStatusCodes: number[] = defaultExemptStatusCodes
): [RetryOptions, RequestRequestOptions | undefined] {
  if (retries <= 0) {
    return [{enabled: false}, defaultOptions.request]
  }

  const retryOptions: RetryOptions = {
    enabled: true
  }

  if (exemptStatusCodes.length > 0) {
    retryOptions.doNotRetry = exemptStatusCodes
  }

  // The GitHub type has some defaults for `options.request`
  // see: https://github.com/actions/toolkit/blob/4fbc5c941a57249b19562015edbd72add14be93d/packages/github/src/utils.ts#L15
  // We pass these in here so they are not overridden.
  const requestOptions: RequestRequestOptions = {
    ...defaultOptions.request,
    retries
  }

  core.debug(
    `GitHub client configured with: (retries: ${
      requestOptions.retries
    }, retry-exempt-status-code: ${
      retryOptions.doNotRetry ?? 'octokit default: [400, 401, 403, 404, 422]'
    })`
  )

  return [retryOptions, requestOptions]
}
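For concreteness, what the returned tuple looks like with the defaults (an illustrative sketch; the empty options object is just for brevity):

const [retryOpts, requestOpts] = getRetryOptions({})
// retryOpts   -> {enabled: true, doNotRetry: [400, 401, 403, 404, 422]}
// requestOpts -> {retries: 5}
// getRetryOptions({}, 0) would instead return [{enabled: false}, undefined]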
@ -1,35 +0,0 @@
import {HttpClient} from '@actions/http-client/index'
import {createHttpClient} from './utils'

/**
 * Used for managing http clients during either upload or download
 */
export class HttpManager {
  private clients: HttpClient[]
  private userAgent: string

  constructor(clientCount: number, userAgent: string) {
    if (clientCount < 1) {
      throw new Error('There must be at least one client')
    }
    this.userAgent = userAgent
    this.clients = new Array(clientCount).fill(createHttpClient(userAgent))
  }

  getClient(index: number): HttpClient {
    return this.clients[index]
  }

  // client disposal is necessary if a keep-alive connection is used to properly close the connection
  // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
  disposeAndReplaceClient(index: number): void {
    this.clients[index].dispose()
    this.clients[index] = createHttpClient(this.userAgent)
  }

  disposeAndReplaceAllClients(): void {
    for (const [index] of this.clients.entries()) {
      this.disposeAndReplaceClient(index)
    }
  }
}
@ -1,79 +0,0 @@
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {
  isRetryableStatusCode,
  isSuccessStatusCode,
  sleep,
  getExponentialRetryTimeInMilliseconds,
  displayHttpDiagnostics
} from './utils'
import * as core from '@actions/core'
import {getRetryLimit} from './config-variables'

export async function retry(
  name: string,
  operation: () => Promise<IHttpClientResponse>,
  customErrorMessages: Map<number, string>,
  maxAttempts: number
): Promise<IHttpClientResponse> {
  let response: IHttpClientResponse | undefined = undefined
  let statusCode: number | undefined = undefined
  let isRetryable = false
  let errorMessage = ''
  let customErrorInformation: string | undefined = undefined
  let attempt = 1

  while (attempt <= maxAttempts) {
    try {
      response = await operation()
      statusCode = response.message.statusCode

      if (isSuccessStatusCode(statusCode)) {
        return response
      }

      // Extra error information that we want to display if a particular response code is hit
      if (statusCode) {
        customErrorInformation = customErrorMessages.get(statusCode)
      }

      isRetryable = isRetryableStatusCode(statusCode)
      errorMessage = `Artifact service responded with ${statusCode}`
    } catch (error) {
      isRetryable = true
      errorMessage = error.message
    }

    if (!isRetryable) {
      core.info(`${name} - Error is not retryable`)
      if (response) {
        displayHttpDiagnostics(response)
      }
      break
    }

    core.info(
      `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
    )

    await sleep(getExponentialRetryTimeInMilliseconds(attempt))
    attempt++
  }

  if (response) {
    displayHttpDiagnostics(response)
  }

  if (customErrorInformation) {
    throw Error(`${name} failed: ${customErrorInformation}`)
  }
  throw Error(`${name} failed: ${errorMessage}`)
}

export async function retryHttpClientRequest(
  name: string,
  method: () => Promise<IHttpClientResponse>,
  customErrorMessages: Map<number, string> = new Map(),
  maxAttempts = getRetryLimit()
): Promise<IHttpClientResponse> {
  return await retry(name, method, customErrorMessages, maxAttempts)
}
@ -0,0 +1,197 @@
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {info, debug} from '@actions/core'
import {ArtifactServiceClientJSON} from '../../generated'
import {getResultsServiceUrl, getRuntimeToken} from './config'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'

// The twirp http client must implement this interface
interface Rpc {
  request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array>
}

class ArtifactHttpClient implements Rpc {
  private httpClient: HttpClient
  private baseUrl: string
  private maxAttempts = 5
  private baseRetryIntervalMilliseconds = 3000
  private retryMultiplier = 1.5

  constructor(
    userAgent: string,
    maxAttempts?: number,
    baseRetryIntervalMilliseconds?: number,
    retryMultiplier?: number
  ) {
    const token = getRuntimeToken()
    this.baseUrl = getResultsServiceUrl()
    if (maxAttempts) {
      this.maxAttempts = maxAttempts
    }
    if (baseRetryIntervalMilliseconds) {
      this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
    }
    if (retryMultiplier) {
      this.retryMultiplier = retryMultiplier
    }

    this.httpClient = new HttpClient(userAgent, [
      new BearerCredentialHandler(token)
    ])
  }

  // This function satisfies the Rpc interface. It is compatible with the
  // JSON generated client.
  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
    debug(`[Request] ${method} ${url}`)
    const headers = {
      'Content-Type': contentType
    }
    try {
      const {body} = await this.retryableRequest(async () =>
        this.httpClient.post(url, JSON.stringify(data), headers)
      )

      return body
    } catch (error) {
      throw new Error(`Failed to ${method}: ${error.message}`)
    }
  }

  async retryableRequest(
    operation: () => Promise<HttpClientResponse>
  ): Promise<{response: HttpClientResponse; body: object}> {
    let attempt = 0
    let errorMessage = ''
    let rawBody = ''
    while (attempt < this.maxAttempts) {
      let isRetryable = false

      try {
        const response = await operation()
        const statusCode = response.message.statusCode
        rawBody = await response.readBody()
        debug(`[Response] - ${response.message.statusCode}`)
        debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
        const body = JSON.parse(rawBody)
        debug(`Body: ${JSON.stringify(body, null, 2)}`)
        if (this.isSuccessStatusCode(statusCode)) {
          return {response, body}
        }
        isRetryable = this.isRetryableHttpStatusCode(statusCode)
        errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
        if (body.msg) {
          if (UsageError.isUsageErrorMessage(body.msg)) {
            throw new UsageError()
          }

          errorMessage = `${errorMessage}: ${body.msg}`
        }
      } catch (error) {
        if (error instanceof SyntaxError) {
          debug(`Raw Body: ${rawBody}`)
          throw error
        }

        if (error instanceof UsageError) {
          throw error
        }

        if (NetworkError.isNetworkErrorCode(error?.code)) {
          throw new NetworkError(error?.code)
        }

        isRetryable = true
        errorMessage = error.message
      }

      if (!isRetryable) {
        throw new Error(`Received non-retryable error: ${errorMessage}`)
      }

      if (attempt + 1 === this.maxAttempts) {
        throw new Error(
          `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
        )
      }

      const retryTimeMilliseconds =
        this.getExponentialRetryTimeMilliseconds(attempt)
      info(
        `Attempt ${attempt + 1} of ${
          this.maxAttempts
        } failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
      )
      await this.sleep(retryTimeMilliseconds)
      attempt++
    }

    throw new Error(`Request failed`)
  }

  isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) return false
    return statusCode >= 200 && statusCode < 300
  }

  isRetryableHttpStatusCode(statusCode?: number): boolean {
    if (!statusCode) return false

    const retryableStatusCodes = [
      HttpCodes.BadGateway,
      HttpCodes.GatewayTimeout,
      HttpCodes.InternalServerError,
      HttpCodes.ServiceUnavailable,
      HttpCodes.TooManyRequests
    ]

    return retryableStatusCodes.includes(statusCode)
  }

  async sleep(milliseconds: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, milliseconds))
  }

  getExponentialRetryTimeMilliseconds(attempt: number): number {
    if (attempt < 0) {
      throw new Error('attempt should be a non-negative integer')
    }

    if (attempt === 0) {
      return this.baseRetryIntervalMilliseconds
    }

    const minTime =
      this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
    const maxTime = minTime * this.retryMultiplier

    // returns a random number between minTime and maxTime (exclusive)
    return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
  }
}

export function internalArtifactTwirpClient(options?: {
  maxAttempts?: number
  retryIntervalMs?: number
  retryMultiplier?: number
}): ArtifactServiceClientJSON {
  const client = new ArtifactHttpClient(
    getUserAgentString(),
    options?.maxAttempts,
    options?.retryIntervalMs,
    options?.retryMultiplier
  )
  return new ArtifactServiceClientJSON(client)
}
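With the defaults above (3000 ms base interval, 1.5 multiplier), the jittered backoff works out as follows (worked from getExponentialRetryTimeMilliseconds):

// attempt 0 -> exactly 3000 ms
// attempt 1 -> random in [3000 * 1.5, 3000 * 1.5 * 1.5) = [4500, 6750) ms
// attempt 2 -> random in [3000 * 1.5^2, 3000 * 1.5^3)   = [6750, 10125) ms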
@ -0,0 +1,59 @@
import os from 'os'

// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is used during upload to blob storage
export function getUploadChunkSize(): number {
  return 8 * 1024 * 1024 // 8 MB Chunks
}

export function getRuntimeToken(): string {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
  }
  return token
}

export function getResultsServiceUrl(): string {
  const resultsUrl = process.env['ACTIONS_RESULTS_URL']
  if (!resultsUrl) {
    throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable')
  }

  return new URL(resultsUrl).origin
}

export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost =
    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')

  return !isGitHubHost && !isGheHost
}

export function getGitHubWorkspaceDir(): string {
  const ghWorkspaceDir = process.env['GITHUB_WORKSPACE']
  if (!ghWorkspaceDir) {
    throw new Error('Unable to get the GITHUB_WORKSPACE env variable')
  }
  return ghWorkspaceDir
}

// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
export function getConcurrency(): number {
  const numCPUs = os.cpus().length

  if (numCPUs <= 4) {
    return 32
  }

  const concurrency = 16 * numCPUs
  return concurrency > 300 ? 300 : concurrency
}
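Worked values of getConcurrency for a few machine sizes:

// 2 CPUs  -> 32 (the small-machine floor)
// 8 CPUs  -> 16 * 8  = 128
// 32 CPUs -> 16 * 32 = 512, capped to 300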
@ -0,0 +1,72 @@
export class FilesNotFoundError extends Error {
  files: string[]

  constructor(files: string[] = []) {
    let message = 'No files were found to upload'
    if (files.length > 0) {
      message += `: ${files.join(', ')}`
    }

    super(message)
    this.files = files
    this.name = 'FilesNotFoundError'
  }
}

export class InvalidResponseError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'InvalidResponseError'
  }
}

export class ArtifactNotFoundError extends Error {
  constructor(message = 'Artifact not found') {
    super(message)
    this.name = 'ArtifactNotFoundError'
  }
}

export class GHESNotSupportedError extends Error {
  constructor(
    message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'
  ) {
    super(message)
    this.name = 'GHESNotSupportedError'
  }
}

export class NetworkError extends Error {
  code: string

  constructor(code: string) {
    const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
    super(message)
    this.code = code
    this.name = 'NetworkError'
  }

  static isNetworkErrorCode = (code?: string): boolean => {
    if (!code) return false
    return [
      'ECONNRESET',
      'ENOTFOUND',
      'ETIMEDOUT',
      'ECONNREFUSED',
      'EHOSTUNREACH'
    ].includes(code)
  }
}

export class UsageError extends Error {
  constructor() {
    const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
    super(message)
    this.name = 'UsageError'
  }

  static isUsageErrorMessage = (msg?: string): boolean => {
    if (!msg) return false
    return msg.includes('insufficient usage')
  }
}
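A brief sketch of how a caller might branch on these classes (the wrapped operation is illustrative, not part of this file):

try {
  // ... some artifact upload or download call
} catch (error) {
  if (error instanceof NetworkError) {
    // transient connectivity issue, e.g. error.code === 'ECONNRESET'
  } else if (error instanceof UsageError) {
    // storage quota exhausted; retrying will not help until usage is recalculated
  } else {
    throw error
  }
}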
@ -0,0 +1,159 @@
/**
 * Response from the server when an artifact is uploaded
 */
export interface UploadArtifactResponse {
  /**
   * Total size of the artifact in bytes. Not provided if no artifact was uploaded
   */
  size?: number

  /**
   * The id of the artifact that was created. Not provided if no artifact was uploaded
   * This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
   */
  id?: number
}

/**
 * Options for uploading an artifact
 */
export interface UploadArtifactOptions {
  /**
   * Duration after which artifact will expire in days.
   *
   * By default artifact expires after 90 days:
   * https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
   *
   * Use this option to override the default expiry.
   *
   * Min value: 1
   * Max value: 90 unless changed by repository setting
   *
   * If this is set to a value greater than the retention settings allow, the retention on artifacts
   * will be reduced to match the maximum allowed on the server, and the upload process will continue. An
   * input of 0 assumes the default retention setting.
   */
  retentionDays?: number
  /**
   * The level of compression for Zlib to be applied to the artifact archive.
   * The value can range from 0 to 9:
   * - 0: No compression
   * - 1: Best speed
   * - 6: Default compression (same as GNU Gzip)
   * - 9: Best compression
   * Higher levels will result in better compression, but will take longer to complete.
   * For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
   */
  compressionLevel?: number
}

/**
 * Response from the server when getting an artifact
 */
export interface GetArtifactResponse {
  /**
   * Metadata about the artifact that was found
   */
  artifact: Artifact
}

/**
 * Options for listing artifacts
 */
export interface ListArtifactsOptions {
  /**
   * Filter the workflow run's artifacts to the latest by name
   * In the case of reruns, this can be useful to avoid duplicates
   */
  latest?: boolean
}

/**
 * Response from the server when listing artifacts
 */
export interface ListArtifactsResponse {
  /**
   * A list of artifacts that were found
   */
  artifacts: Artifact[]
}

/**
 * Response from the server when downloading an artifact
 */
export interface DownloadArtifactResponse {
  /**
   * The path where the artifact was downloaded to
   */
  downloadPath?: string
}

/**
 * Options for downloading an artifact
 */
export interface DownloadArtifactOptions {
  /**
   * Denotes where the artifact will be downloaded to. If not specified then the artifact is downloaded to GITHUB_WORKSPACE
   */
  path?: string
}

/**
 * An Actions Artifact
 */
export interface Artifact {
  /**
   * The name of the artifact
   */
  name: string

  /**
   * The ID of the artifact
   */
  id: number

  /**
   * The size of the artifact in bytes
   */
  size: number

  /**
   * The time when the artifact was created
   */
  createdAt?: Date
}

// FindOptions are for fetching Artifact(s) out of the scope of the current run.
export interface FindOptions {
  /**
   * The criteria for finding Artifact(s) out of the scope of the current run.
   */
  findBy?: {
    /**
     * Token with actions:read permissions
     */
    token: string
    /**
     * WorkflowRun of the artifact(s) to lookup
     */
    workflowRunId: number
    /**
     * Repository owner (eg. 'actions')
     */
    repositoryOwner: string
    /**
     * Repository name (eg. 'toolkit')
     */
    repositoryName: string
  }
}

/**
 * Response from the server when deleting an artifact
 */
export interface DeleteArtifactResponse {
  /**
   * The id of the artifact that was deleted
   */
  id: number
}
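A sketch of a filled-in FindOptions for a cross-run lookup (the token source and run id here are hypothetical):

const options: FindOptions = {
  findBy: {
    token: process.env['GITHUB_TOKEN'] as string, // needs actions:read
    workflowRunId: 123456789, // hypothetical run id
    repositoryOwner: 'actions',
    repositoryName: 'toolkit'
  }
}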
@ -0,0 +1,9 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')

/**
 * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
 */
export function getUserAgentString(): string {
  return `@actions/artifact-${packageJson.version}`
}
@ -0,0 +1,71 @@
import * as core from '@actions/core'
import {getRuntimeToken} from './config'
import jwt_decode from 'jwt-decode'

export interface BackendIds {
  workflowRunBackendId: string
  workflowJobRunBackendId: string
}

interface ActionsToken {
  scp: string
}

const InvalidJwtError = new Error(
  'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
)

// uses the JWT token claims to get the
// workflow run and workflow job run backend ids
export function getBackendIdsFromToken(): BackendIds {
  const token = getRuntimeToken()
  const decoded = jwt_decode<ActionsToken>(token)
  if (!decoded.scp) {
    throw InvalidJwtError
  }

  /*
   * example decoded:
   * {
   *   scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
   * }
   */

  const scpParts = decoded.scp.split(' ')
  if (scpParts.length === 0) {
    throw InvalidJwtError
  }
  /*
   * example scpParts:
   * ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
   */

  for (const scopes of scpParts) {
    const scopeParts = scopes.split(':')
    if (scopeParts?.[0] !== 'Actions.Results') {
      // not the Actions.Results scope
      continue
    }

    /*
     * example scopeParts:
     * ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
     */
    if (scopeParts.length !== 3) {
      // missing expected number of claims
      throw InvalidJwtError
    }

    const ids = {
      workflowRunBackendId: scopeParts[1],
      workflowJobRunBackendId: scopeParts[2]
    }

    core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
    core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)

    return ids
  }

  throw InvalidJwtError
}
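Tracing the example claim from the comments above through getBackendIdsFromToken:

// scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
// "Actions.ExampleScope" is skipped (not Actions.Results). The second scope
// splits on ':' into exactly three parts, yielding:
//   workflowRunBackendId    = 'ce7f54c7-61c7-4aae-887f-30da475f5f1a'
//   workflowJobRunBackendId = 'ca395085-040a-526b-2ce8-bdc85f692774'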