mirror of https://github.com/actions/toolkit
Merge remote-tracking branch 'upstream/master' into tool-cache-xar
# Conflicts:
#	packages/tool-cache/__tests__/tool-cache.test.ts
pull/207/head
commit ac3a3233fa
@@ -5,7 +5,7 @@
   "parserOptions": {
     "ecmaVersion": 9,
     "sourceType": "module",
-    "project": "./tsconfig.json"
+    "project": "./tsconfig.eslint.json"
   },
   "rules": {
     "eslint-comments/no-use": "off",
@@ -1,4 +1,23 @@
-## Development
+# Contributions

We welcome contributions in the form of issues and pull requests. We view the contributions and process as the same for internal and external contributors.

## Issues

Log issues for both bugs and enhancement requests. Logging issues is important for the open community.

Issues in this repository should be for the toolkit packages. Runner-specific issues can be filed [in the runner repository](https://github.com/actions/runner).

## Enhancements and Feature Requests

We ask that, before significant effort is put into code changes, we have agreement on taking the change.

1. Create a feature request.
2. When we agree to take the enhancement, create an ADR to agree on the details of the change.

An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](../docs/adrs/README.md).

## Development Life Cycle

This repository uses [Lerna](https://github.com/lerna/lerna#readme) to manage multiple packages. Read the documentation there to begin contributing.
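For orientation, these are the root-level commands this repository wires up through Lerna (a sketch drawn from the root package.json scripts that appear later in this commit, not an exhaustive list):

```bash
npm install          # install the root dev dependencies
npm run bootstrap    # lerna bootstrap: link the packages together
npm run build        # lerna run tsc: compile every package
npm test             # run the jest test suites
```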
@@ -37,4 +56,4 @@ This will ask you some questions about the new package. Start with `0.0.0` as the
 }
 ```

-3. Start developing 😄 and open a pull request.
+3. Start developing 😄.
@@ -11,7 +11,7 @@ Thank you 🙇♀ for wanting to create an issue in this repository. Before you

 * If you have found a security issue [please submit it here](https://hackerone.com/github)
 * If you have questions about writing workflows or action files, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
-* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/articles/about-github-actions#contacting-support)
+* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)

 If your issue is relevant to this repository, please include the information below:
@@ -0,0 +1,79 @@
name: artifact-unit-tests
on: push

jobs:
  build:
    name: Build

    strategy:
      matrix:
        runs-on: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false

    runs-on: ${{ matrix.runs-on }}

    steps:
    - name: Checkout
      uses: actions/checkout@v2

    - name: Set Node.js 12.x
      uses: actions/setup-node@v1
      with:
        node-version: 12.x

    # In order to upload & download artifacts from a shell script, certain env variables need to be set that are only available in the
    # node context. This runs a local action that gets and sets the necessary env variables that are needed
    - name: Set env variables
      uses: ./packages/artifact/__tests__/ci-test-action/

    # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
    # without these to just compile the artifacts package
    - name: Install root npm packages
      run: npm ci

    - name: Compile artifact package
      run: |
        npm ci
        npm run tsc
      working-directory: packages/artifact

    - name: Set artifact file contents
      run: |
        echo "::set-env name=non-gzip-artifact-content::hello"
        echo "::set-env name=gzip-artifact-content::Some large amount of text that has a compression ratio that is greater than 100%. If greater than 100%, gzip is used to upload the file"

    - name: Create files that will be uploaded
      run: |
        mkdir artifact-path
        echo ${{ env.non-gzip-artifact-content }} > artifact-path/world.txt
        echo ${{ env.gzip-artifact-content }} > artifact-path/gzip.txt

    # We're using node -e to call the functions directly available in the @actions/artifact package
    - name: Upload artifacts using uploadArtifact()
      run: |
        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-1',['artifact-path/world.txt'], '${{ github.workspace }}'))"
        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-2',['artifact-path/gzip.txt'], '${{ github.workspace }}'))"

    - name: Download artifacts using downloadArtifact()
      run: |
        mkdir artifact-1-directory
        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-1','artifact-1-directory'))"
        mkdir artifact-2-directory
        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-2','artifact-2-directory'))"

    - name: Verify downloadArtifact()
      shell: bash
      run: |
        scripts/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
        scripts/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"

    - name: Download artifacts using downloadAllArtifacts()
      run: |
        mkdir multi-artifact-directory
        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadAllArtifacts('multi-artifact-directory'))"

    - name: Verify downloadAllArtifacts()
      shell: bash
      run: |
        scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
        scripts/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
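For readers unfamiliar with the `node -e "Promise.resolve(...)"` one-liners above, each is roughly equivalent to the following standalone script (a sketch; the artifact name and file paths are the ones used in the workflow):

```js
// Standalone equivalent of the first upload one-liner in the workflow above
const artifactClient = require('./packages/artifact/lib/artifact-client').create()

async function run() {
  // name, files to upload, and the root directory the paths are relative to
  await artifactClient.uploadArtifact(
    'my-artifact-1',
    ['artifact-path/world.txt'],
    process.env.GITHUB_WORKSPACE
  )
}

run()
```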
@@ -1,23 +1,34 @@
 name: toolkit-unit-tests
 on:
+  push:
+    branches:
+      - master
+    paths-ignore:
+      - '**.md'
+  pull_request:
+    paths-ignore:
+      - '**.md'

 jobs:
-  Ubuntu:
-    name: Run Ubuntu
-    runs-on: ubuntu-latest
+  build:
+    name: Build
+
+    strategy:
+      matrix:
+        runs-on: [ubuntu-latest, macOS-latest, windows-latest]
+      fail-fast: false
+
+    runs-on: ${{ matrix.runs-on }}

     steps:
     - name: Checkout
-      uses: actions/checkout@master
+      uses: actions/checkout@v2

-    - name: Set Node.js 10.x
-      uses: actions/setup-node@master
+    - name: Set Node.js 12.x
+      uses: actions/setup-node@v1
       with:
-        node-version: 10.x
+        node-version: 12.x

     - name: npm install
       run: npm install
@@ -30,58 +41,19 @@ jobs:

     - name: npm test
       run: npm test
       env:
         GITHUB_TOKEN: ${{ github.token }}

     - name: Lint
       run: npm run lint

     - name: Format
       run: npm run format-check
-  macOS:
-    name: Run macOS
-    runs-on: macos-latest
-    steps:
-    - name: Checkout
-      uses: actions/checkout@master
-
-    - name: Set Node.js 10.x
-      uses: actions/setup-node@master
-      with:
-        node-version: 10.x
+
+    - name: audit tools
+      run: npm audit --audit-level=moderate
+      if: matrix.runs-on == 'ubuntu-latest'
-
-    - name: npm install
-      run: npm install
-
-    - name: Bootstrap
-      run: npm run bootstrap
-
-    - name: Compile
-      run: npm run build
-
-    - name: npm test
-      run: npm test
-  Windows:
-    name: Run Windows
-    runs-on: windows-latest
-    steps:
-    - name: Checkout
-      uses: actions/checkout@master
-
-    - name: Set Node.js 10.x
-      uses: actions/setup-node@master
-      with:
-        node-version: 10.x
-
-    - name: npm install
-      run: npm install
-
-    - name: Bootstrap
-      run: npm run bootstrap
-
-    - name: Compile
-      run: npm run build
-
-    # TODO: This currently ignores exec due to issues with Node and spawning on Windows, which I think is exacerbated by Jest.
-    # It doesn't seem to affect behavior in actions themselves, just when testing with Jest.
-    # See other similar issues here: https://github.com/nodejs/node/issues/25484
-    - name: npm test
-      run: npm run test-ci
+
+    - name: audit packages
+      run: npm run audit-all
+      if: matrix.runs-on == 'ubuntu-latest'
@@ -1,4 +1,5 @@
 node_modules/
 packages/*/node_modules/
 packages/*/lib/
 packages/*/__tests__/_temp/
+.DS_Store
README.md
@@ -36,6 +36,15 @@ $ npm install @actions/exec --save
 ```
 <br/>

+:ice_cream: [@actions/glob](packages/glob)
+
+Provides functions to search for files matching glob patterns. Read more [here](packages/glob)
+
+```bash
+$ npm install @actions/glob --save
+```
+<br/>
+
 :pencil2: [@actions/io](packages/io)

 Provides disk i/o functions like cp, mv, rmRF, find etc. Read more [here](packages/io)
@@ -63,6 +72,15 @@ $ npm install @actions/github --save
 ```
 <br/>

+:floppy_disk: [@actions/artifact](packages/artifact)
+
+Provides functions to interact with actions artifacts. Read more [here](packages/artifact)
+
+```bash
+$ npm install @actions/artifact --save
+```
+<br/>
+
 ## Creating an Action with the Toolkit

 :question: [Choosing an action type](docs/action-types.md)
@@ -77,6 +95,18 @@ Actions are downloaded and run from the GitHub graph of repos. This contains guidance
 <br/>
+<br/>
+
+:warning: [Problem Matchers](docs/problem-matchers.md)
+
+Problem Matchers are a way to scan the output of actions for a specified regex pattern and surface that information prominently in the UI.
+<br/>
+<br/>
+
+:warning: [Proxy Server Support](docs/proxy-support.md)
+
+Self-hosted runners can be configured to run behind proxy servers.
+<br/>
+<br/>

 <h3><a href="https://github.com/actions/hello-world-javascript-action">Hello World JavaScript Action</a></h3>

 Illustrates how to create a simple hello world javascript action.
@@ -167,13 +197,9 @@ console.log(`We can even get context data, like the repo: ${context.repo.repo}`)
 ```
 <br/>

 Recommendations on versioning, releases and tagging your action.
 <br/>
 <br/>

 ## Contributing

-We welcome contributions. See [how to contribute](docs/contribute.md).
+We welcome contributions. See [how to contribute](.github/CONTRIBUTING.md).

 ## Code of Conduct
@@ -1,9 +1,23 @@
 # Debugging
-If the build logs do not provide enough detail on why a build may be failing, some other options exist to assist with troubleshooting.
+If the job logs do not provide enough detail on why a job may be failing, some other options exist to assist with troubleshooting.

+## Step Debug Logs
+This is the primary way for customers to debug job failures caused by failed steps.
+
+Step debug logs increase the verbosity of a job's logs during and after a job's execution to assist with troubleshooting.
+
+Additional log events with the prefix `::debug::` will now also appear in the job's logs; these log events are provided by the Action's author and the runner process.
+
+### How to Access Step Debug Logs
+This flag can be enabled by [setting the secret](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) `ACTIONS_STEP_DEBUG` to `true`.
+
+All actions run while this secret is enabled will show debug events in the [Downloaded Logs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/managing-a-workflow-run#downloading-logs) and [Web Logs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/managing-a-workflow-run#viewing-logs-to-diagnose-failures).
+
 ## Runner Diagnostic Logs
 Runner Diagnostic Logs provide additional log files detailing how the Runner is executing an action.

 You need the runner diagnostic logs only if you think there is an infrastructure problem with GitHub Actions and you want the product team to check the logs.

 Each file contains different logging information that corresponds to that process:
 * The Runner process coordinates setting up workers to execute jobs.
 * The Worker process executes the job.
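For action authors, the `::debug::` events usually come from `@actions/core` rather than hand-written echo statements. A minimal sketch:

```js
const core = require('@actions/core')

// Emits "::debug::Reading the cache directory" to the job log.
// The line is only rendered when the ACTIONS_STEP_DEBUG secret is true.
core.debug('Reading the cache directory')
```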
@@ -11,16 +25,7 @@ Each file contains different logging information that corresponds to that process:
 These files contain the prefix `Runner_` or `Worker_` to indicate the log source.

 ### How to Access Runner Diagnostic Logs
-These log files are enabled by [setting the secret](https://help.github.com/en/articles/virtual-environments-for-github-actions#creating-and-using-secrets-encrypted-variables) `ACTIONS_RUNNER_DEBUG` to `true`.
+These log files are enabled by [setting the secret](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) `ACTIONS_RUNNER_DEBUG` to `true`.

-All actions ran while this secret is enabled contain additional diagnostic log files in the `runner-diagnostic-logs` folder of the [log archive](https://help.github.com/en/articles/managing-a-workflow-run#downloading-logs-and-artifacts).
+All actions run while this secret is enabled contain additional diagnostic log files in the `runner-diagnostic-logs` folder of the [log archive](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/managing-a-workflow-run#downloading-logs).

-## Step Debug Logs
-Step debug logs increase the verbosity of a job's logs during and after a job's execution to assist with troubleshooting.
-
-Additional log events with the prefix `::debug::` will now also appear in the job's logs.
-
-### How to Access Step Debug Logs
-This flag can be enabled by [setting the secret](https://help.github.com/en/articles/virtual-environments-for-github-actions#creating-and-using-secrets-encrypted-variables) `ACTIONS_STEP_DEBUG` to `true`.
-
-All actions ran while this secret is enabled will show debug events in the [Downloaded Logs](https://help.github.com/en/articles/managing-a-workflow-run#downloading-logs-and-artifacts) and [Web Logs](https://help.github.com/en/articles/managing-a-workflow-run#viewing-logs-to-diagnose-failures).
@@ -7,8 +7,8 @@ Examples:
 ```yaml
 steps:
     - uses: actions/javascript-action@v1 # recommended. starter workflows use this
-    - user: actions/javascript-action@v1.0.0 # if an action offers specific releases
-    - uses: actions/javascript-action@41775a4 # binding to a specific sha
+    - uses: actions/javascript-action@v1.0.0 # if an action offers specific releases
+    - uses: actions/javascript-action@41775a4da8ffae865553a738ab8ac1cd5a3c0044 # sha
 ```

 # Compatibility

@@ -17,7 +17,7 @@ Binding to a major version is the latest of that major version (e.g. `v1` == "1.*")

 Major versions should guarantee compatibility. A major version can add net new capabilities but should not break existing input compatibility or break existing workflows.

-Major version binding allows you to take advantage of bug fixes and critical functionality and security fixes. The `master` branch has the latest code and is unstable to bind to since a breaking new major version may first get implemented in master.
+Major version binding allows you to take advantage of bug fixes and critical functionality and security fixes. The `master` branch has the latest code and is unstable to bind to since changes get committed to master and released to the marketplace by creating a tag. In addition, a new major version carrying breaking changes will get implemented in master after branching off the previous major version.

 > Warning: do not reference `master` since that is the latest code and can be carrying breaking changes of the next major version.
@@ -26,36 +26,35 @@ steps:
 - uses: actions/javascript-action@master # do not do this
 ```

-Binding to the immutable sha1 may offer more reliability. However, note that the hosted images toolsets (e.g. ubuntu-latest) move forward and if there is a tool breaking issue, actions may react with a patch to a major version to compensate, so binding to a specific SHA may prevent you from getting fixes.
+Binding to the immutable full sha1 may offer more reliability. However, note that the hosted images toolsets (e.g. ubuntu-latest) move forward and if there is a tool breaking issue, actions may react with a patch to a major version to compensate, so binding to a specific SHA may prevent you from getting fixes.

 > Recommendation: bind to major versions to get functionality and fixes but reserve binding to a specific release or SHA as a mitigation strategy for unforeseen breaks.

 # Recommendations

-1. **Create a release branch for each major version**: For example, `releases/v1`. This will allow for releases of that major version while the development of a different major version proceeds.
+1. **Create a GitHub release for each specific version**: Creating a release like [v1.0.0](https://github.com/actions/javascript-action/releases/tag/v1.0.0) allows users to bind back to a specific version if an issue is encountered with the latest major version.

-2. **Validate changes referencing the release branch**:
-
-   ```yaml
-   steps:
-       - uses: actions/sample-action@releases/v1
-   ```
+2. **Publish the specific version to the marketplace**: When you release a specific version, choose the option to "Publish this Action to the GitHub Marketplace".
+
+   <img src="https://user-images.githubusercontent.com/33549821/78670739-36f5ae00-78ac-11ea-9660-57d5687ce520.png" alt="screenshot" height="250"/>

-3. **Create a GitHub release for each specific version**: Creating a release like [v1.0.0](https://github.com/actions/javascript-action/releases/tag/v1.0.0) allows users to bind back to a specific version if an issue is encountered with the latest major version.
-
-4. **Release that version by updating the major version tag**: Move the major version tag (v1, v2, etc.) to point to the ref of the current release. This will act as the stable release for that major version. You should keep this tag updated to the most recent stable minor/patch release.
+3. **Make the new release available to those binding to the major version tag**: Move the major version tag (v1, v2, etc.) to point to the ref of the current release. This will act as the stable release for that major version. You should keep this tag updated to the most recent stable minor/patch release.

 ```
 git checkout releases/v1
 git tag -fa v1 -m "Update v1 tag"
 git push origin v1 --force
 ```

-This will result in a major version tag and the latest specific version pointing to the same SHA. See [javascript-action tags](https://github.com/actions/javascript-action/tags) as an example.
-
-5. **Compatibility Breaks**: introduce a new major version branch (releases/v2) and tag (v2) if changes will break existing workflows. For example, changing inputs.
-
-See [Git-Basics-Tagging](https://git-scm.com/book/en/v2/Git-Basics-Tagging)
+# Major Versions
+
+All releases for a major version should hold compatibility, including input compatibility and behavior compatibility.
+
+Introduce a major version for compatibility breaks and major rewrites of the action.
+
+Ideally, a major version would carry other benefits to the user to entice them to upgrade their workflows. Since updating their workflows will need to be done with an understanding of the changes and what compatibility was broken, introducing a new major version shouldn't be taken lightly.
+
+To get feedback and to set expectations, the new major version can be initially released with a `v2-beta` tag to indicate you can try it out but it's still undergoing some churn. Upon release the `-beta` can be dropped and there's an expectation of compatibility from that point forward.
+
+[An example of v2-beta with checkout](https://github.com/actions/checkout/tree/c170eefc2657d93cc91397be50a299bff978a052#checkout-v2-beta)
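For illustration, cutting such a beta tag could look like this (a sketch; the branch and tag names are assumptions):

```
git checkout releases/v2
git tag -a v2-beta -m "v2 beta release"
git push origin v2-beta
```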
# Sample Workflow
@@ -0,0 +1,216 @@
# ADR 381: `glob` module

**Date**: 2019-12-05

**Status**: Accepted

## Context

This ADR proposes adding a `glob` function to the toolkit.

First party actions should have a consistent glob experience.

Related to artifact upload/download v2.

## Decision

### New module

Create a new module `@actions/glob` that can be versioned at its own pace - not tied to `@actions/io`.

### Signature

```js
/**
 * Constructs a globber from patterns
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
export function create(
  patterns: string,
  options?: GlobOptions
): Promise<Globber> {}

/**
 * Used to match files and directories
 */
export interface Globber {
  /**
   * Returns the search path preceding the first glob segment, from each pattern.
   * Duplicates and descendants of other paths are filtered out.
   *
   * Example 1: The patterns `/foo/*` and `/bar/*` return `/foo` and `/bar`.
   *
   * Example 2: The patterns `/foo/*` and `/foo/bar/*` return `/foo`.
   */
  getSearchPaths(): string[]

  /**
   * Returns files and directories matching the glob patterns.
   *
   * Order of the results is not guaranteed.
   */
  glob(): Promise<string[]>

  /**
   * Returns files and directories matching the glob patterns.
   *
   * Order of the results is not guaranteed.
   */
  globGenerator(): AsyncGenerator<string, void>
}

/**
 * Options to control globbing behavior
 */
export interface GlobOptions {
  /**
   * Indicates whether to follow symbolic links. Generally this should be set
   * to false when deleting files.
   *
   * @default true
   */
  followSymbolicLinks?: boolean

  /**
   * Indicates whether directories that match a glob pattern should implicitly
   * cause all descendant paths to be matched.
   *
   * For example, given the directory `my-dir`, the following glob patterns
   * would produce the same results: `my-dir/**`, `my-dir/`, `my-dir`
   *
   * @default true
   */
  implicitDescendants?: boolean

  /**
   * Indicates whether broken symbolic links should be ignored and omitted from
   * the result set. Otherwise an error will be thrown.
   *
   * @default true
   */
  omitBrokenSymbolicLinks?: boolean
}
```

### Toolkit usage

Example, do not follow symbolic links:

```js
const patterns = core.getInput('path')
const globber = await glob.create(patterns, {followSymbolicLinks: false})
const files = await globber.glob()
```

Example, iterator:

```js
const patterns = core.getInput('path')
const globber = await glob.create(patterns)
for await (const file of globber.globGenerator()) {
  console.log(file)
}
```

### Action usage

Actions should follow symbolic links by default.

Users can opt out.

Example:

```yaml
jobs:
  build:
    steps:
    - uses: actions/upload-artifact@v1
      with:
        path: |
          **/*.tar.gz
          **/*.pkg
        follow-symbolic-links: false # opt out, should default to true
```

### HashFiles function

Hash files should not follow symbolic links by default.

Users can opt in by specifying the flag `--follow-symbolic-links`.

Example:

```yaml
jobs:
  build:
    steps:
    - uses: actions/cache@v1
      with:
        hash: ${{ hashFiles('--follow-symbolic-links', '**/package-lock.json') }}
```

### Glob behavior

Patterns `*`, `?`, `[...]`, `**` (globstar) are supported, with the following behaviors:

- File names that begin with `.` may be included in the results
- Case insensitive on Windows
- Directory separators `/` and `\` are both supported on Windows

Note:
- Refer [here](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html#Pattern-Matching) for more information about Bash glob patterns.
- Refer [here](https://www.gnu.org/software/bash/manual/html_node/The-Shopt-Builtin.html) for more information about Bash glob options.

### Tilde expansion

Support basic tilde expansion, for current user HOME replacement only.

For example, on macOS:
- `~` may expand to `/Users/johndoe`
- `~/foo` may expand to `/Users/johndoe/foo`

Note:
- Refer [here](https://www.gnu.org/software/bash/manual/html_node/Tilde-Expansion.html) for more information about Bash tilde expansion.
- All other forms of tilde expansion are not supported.
- Use `os.homedir()` to resolve the HOME path
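A minimal sketch of that HOME-only expansion (an illustration of the rule above, not the actual implementation):

```js
const os = require('os')
const path = require('path')

// Expand a leading `~` or `~/` using the current user's HOME directory;
// all other forms of tilde expansion are intentionally unsupported.
function expandTilde(pattern) {
  if (pattern === '~' || pattern.startsWith('~/')) {
    return path.join(os.homedir(), pattern.substring(1))
  }
  return pattern
}
```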
### Root and normalize paths

An unrooted pattern will be rooted using the current working directory prior to searching. Additionally, the search path will be normalized prior to searching (relative pathing removed, slashes normalized on Windows, extra slashes removed).

The two side effects are:
1. Rooted and normalized paths are always returned
2. The pattern `**` will include the working directory in the results

These side effects diverge from Bash behavior. Whereas Bash is designed to be a shell, we are designing an API. This decision is intended to improve the predictability of the API results.

Note:
- In Bash, the results are not rooted when the pattern is relative.
- In Bash, the results are not normalized. For example, the results from `./*` may look like: `./foo ./bar`
- In Bash, the results from the pattern `**` do not include the working directory. However, the results from `/foo/**` would include the directory `/foo`. Also, the results from `foo/**` would include the directory `foo`.
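To make the first side effect concrete, here is what the rooting behavior would look like in use (a sketch; the working directory and file names are assumed):

```js
// Assuming the current working directory is /home/user/repo
const globber = await glob.create('*')
const files = await globber.glob()
// files contains rooted, normalized paths, e.g.:
// ['/home/user/repo/README.md', '/home/user/repo/package.json', ...]
```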
## Comments

Patterns that begin with `#` are treated as comments.

## Exclude patterns

A leading `!` changes the meaning of an include pattern to exclude.

Note:
- Multiple leading `!` flips the meaning.
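Combining the comment and exclude rules, a pattern input might look like the following (an illustrative example, not from the ADR):

```
# include every .txt file, but exclude any named secret.txt
**/*.txt
!**/secret.txt
```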
## Escaping

Wrapping special characters in `[]` can be used to escape literal glob characters in a file name. For example, the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`.

On Linux/macOS, `\` is also treated as an escape character.

## Consequences

- Publish new module `@actions/glob`
- Publish docs for the module (add link from `./README.md` to new doc `./packages/glob/README.md`)
@@ -0,0 +1,19 @@
# ADRs

ADR, short for "Architecture Decision Record", is a way of capturing important architectural decisions, along with their context and consequences.

This folder includes ADRs for the actions toolkit. ADRs are proposed in the form of a pull request, and they commonly follow this format:

* **Title**: short present tense imperative phrase, less than 50 characters, like a git commit message.

* **Status**: proposed, accepted, rejected, deprecated, superseded, etc.

* **Context**: what is the issue that we're seeing that is motivating this decision or change.

* **Decision**: what is the change that we're actually proposing or doing.

* **Consequences**: what becomes easier or more difficult to do because of this change.

---

- More information about ADRs can be found [here](https://github.com/joelparkerhenderson/architecture_decision_record).
@@ -1 +1 @@
<mxfile modified="2019-09-04T11:27:58.307Z" host="www.draw.io" agent="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36" etag="MFzGVuAX-y50cwbudjLg" version="11.2.5" type="device" pages="1"><diagram name="Page-1" id="ff44883e-f642-bcb2-894b-16b3d25a3f0b">7Vtbc5s4GP01nj4lA+L+WCdp96E70910uu1TB4OMmWDkEdhx9tfvJ5AMSPIta4Mbx5mJ0R10zncVHll38/VnGi5mf5IYZyNkxOuRdT9CyHds+M8qXuoKxzPqioSmcV1lNhWP6b+YV4puyzTGRadjSUhWpotuZUTyHEdlpy6klDx3u01J1l11ESZYqXiMwkzU3jpN/T9pXM54vekGTcMfOE1mfHEfuXXDJIyeEkqWOV9xhKxp9amb56GYiz9qMQtj8tyqsh5G1h0lpKyv5us7nLHNFRsnxpUv4m5H1nhWzjMomHBZNX/aMtg8ZDA8HMV52V5u23xTYxJFQWAZNnanTngTKPPjGHaWF3OSw9c4WtIVjvmKrcWLMqQlJwMwwRrjPP7I0IRylIVFkUZ15ac0E2Og1B5RlJQ84TuSEVotbxnVZ9MisGSPPSX5ZjVUTVXSlx+beaHwEwoGUIEX7xm9jU3pRZTWafmjdd0aBaVmECuIMeo+C2TIkkZYv7mIS0JIE1zqu/BZ2Ka3puXYfcZkjuHOoQPFWVimq64QhFxqkk2/zdCvJIUbRQYXcFsIKpdvT8i3mKJ+DD6q4Q9ctG6jqapYdRjDTPfUFNvHpp2c0WG/E/kNy1rE+tkm3RaWvYox1n7GoH4YY0mM8WXG1Pd4FsbUC63CbMnvPlLVIEwBxoVR53mWlvhxEVZ7+gzmrcugSrVX1KoYIrS3qVM+bvVRiGR3iWSxicIsTXKm5wBeTFkHYGVrqqnD/nbxYIVpidc7MeStyO9isbHOz42lE11mbRtnbEe9g9cx4Kjm7NLEeY8qf63ZeJVAuxcj0E4gkcjqTaCRKtBoUIE2BxZo27scgbZUcKyrBseRwPHRcODYKjjOVYPjyVpsSFNov5vCI0yhvd8Uuv2YQs8azBS6qkBrWHRFAu3awwl08ovef//7r+Rhtfj+7UtBvkR3N5roY4TcDFYdT+AiqS5omEdwbx9FCyy0aYTvcM5wyicF+wKu4rDABfRZqWEMYLZgl1GYZWRZ7od8gWkKD8pgEYO+NlXjBSnSMiU5quTY9E9kEiWt69kqSCbqEyWNSznD0VOas0elJGa5TLwAJYrzKGWbzyGkAqZvuGAa4QE9KJjAppSyrm4LE1fsbaHgVYrgsC1OAaaPvGGexnG2TbC70nwK2XKdrifja2DTCRc6F2waZ1OTHroi9ee7wzmbWoTefZpdPs0uUu9ybKx+HBsnMG890/GQ7zng1XhBl1u2tnVPClhdBHUp67pn85602+0oOmSartmgolgyTW9MKZkzPEDDp3nydtW7Z3TVu2vZqno3+lTvl5nq3yiMgZP9r1Uedj/Kw5fzHgeeESkTubJNO9/RgXZLPUVBzDEsCFUgzGybJuCPv1214DuSftZEVHafWsFX8FAiqojisMQ1h3GFDtzMuJKLZGucZZi3EOKorr0y+85Jeg/KwO6eRv17clBmqurf7dN3DPYDfVToPA8LtoG/Z8yMpJjZDoaOmcXRXTv68lSLfUXRV3BxySfNQfcFeFBHhF+v94QEPc/u5xiSfZQN36F+juwwuWa/fo6pSYL51y3Pgd/1PIfOppiao9TfS577T6cIWrdDol2h03tcdCgZLzO3d2uDlDSEvDFujQMpeWSQziZp+X190bQvuxbIb+zJp5oH2zXJK+o7wWeqGb6rCuBNQ/ZQPEsxY71G8KZ6bF3nXCfLN5xf9QPZUVTzq/3CoGa2ytYxJgjGFFN24Mmy3oZ08Gzw0BsZH0DNwLAPcLnMea/47cIY2N1jkMBAA8N41oQYOiAhNicrvG+u3vNilv73LMfj7Xfx1r6s0GtezFQTY1tSW5chdJWrxm9KCunYxp/Exsnv8QWBApIOo7MJpZi4hdHuN3euByp/i1+5L5F5Pqx072jJkU3zUzS+7XFYzHSBzcFbtteBb22Io9kPUfc//XwZD09OOx3q55umfIJwsgQWFJtfRNbdm9+dWg//AQ==</diagram></mxfile>
<mxfile modified="2019-12-12T18:56:00.899Z" host="www.draw.io" agent="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36" etag="CISsL8yLQ-3TSrXBbF_M" version="12.3.8" type="device" pages="1"><diagram name="Page-1" id="ff44883e-f642-bcb2-894b-16b3d25a3f0b">7VvbctsqFP0aP9YjkISkx9hJcx7amZ7mTNs8dYiEZVpZeBC+9esPyMi64UsdW3Ymdh4Mm5tgrbU3AqdnDyfLR46n488sIkkPWtGyZ9/3IASWi+SXsqzWFhf5a0PMaaQrlYYn+ocULbV1RiOS1SoKxhJBp3VjyNKUhKJmw5yzRb3aiCX1Uac4Ji3DU4iTwtp3S/t3GomxtgMUlAX/EBqP9eA+1FN+weHvmLNZqkfsQXuUf9bFE1z0paeajXHEFhWT/dCzh5wxsU5NlkOSqNUtFq5oJ1bF0/bswVhMEpkBMpkXf9zSGBzSWE6Ok1RUh9vW38h6CcMgsC2HoJGLPwSt/kkkV1ZnU5bKr0E443MS6RErg2cCc6HJIJlgD0ga3Sk0ZT5McJbRcG38SJOijcxVW2SCs99kyBLG8+FtK/9sSgos1bRHLN2MBvOuBF/92PQrM88yY0kq6Oy94re1ya2K3JKKH5V0pZXMlY1UpmjTXucCGTbjITEvLtRKwDwmwlxF96IWvdKtxu6RsAmRTy4rcJJgQed1EWCtmnhTb9P0C6PyQaGlFe4UQtX69jyr3sV6GrpVyR+ZqDxGacpZdRjDADo1xfaxaSdnTNjvRH7Dsgqxnquk28Kyoxhjd0MH5NfpgNwGHdaUPQsd1gPNcTIrHFqLHkki44ZixWJMBXma4ny5FjJ01cmRe+2cNTn4hWMGJr+C8k+LI06dI7bqCCc0TpULk8gRripIwlW6GrnqbxfEc8IFWe5EUJfCBhJuIcxFGcSKKuNq+LK2Y15D62+gaUeqa1PqHi99bEQ4Sqtov3eH3cjZDRoksjuTM2zJud/vX1TR4MKKdrzrUbR9Xc720tB4Th0aH14OGucGTQ2apgu7ZBx0bnHwL+Kgsz8Oom7ioGdfLA6im5yrckbO5eQc/+T3377+Gz/Mp9/++5SxT+Hwg/G1AyVy1MGLTMR5guM0lM92V5TIgTaF8htPFE7pS6a+JFMJzkgm68zbZzQSs6lKhjhJ2Ezsh3xKOJUTVbAUjb6UpsGUZVRQlsJcxcA/kc9FjUMBpw0SgF2idNuv1AACln25DYsRoFtk3BUZd3F6V3js6NTHQ6DvAdeDvufK2OgFdW45xtI9B4Tto6XGRg6hs8Vg43L7+x19yAkWZL2eyon3lKoGOUbxVvdvgT7o25u+eCUw1Hvf2UnnsULCeBJXFDjNWAHasQJ1GtGtW7DYhdDlt1xtrr+xYHG80y/IeW6X7lsNbwsbaB7qtv3GqRUC3bptELTUPMGZEk2TQ1IOokmVqkI1r6pK06aWGpW4qHShd7pgQqMo2eYt6i4ip5d+qIbyFVgn2f017uyQZdieewZFg2A7jV6laNh2ubvfg94PVm5jo24f+CrV1OvpsDrkoq28rtfLHuFsbPLGBy/ZXodXWRDXsB6F7ZV+ETSu1bymOzvULwLg7enpdI4xcFf8bjX8NcOfvybsz+Pz/XffcMFysv2s1bfe3H7WLvezLXUaCLn93rVx9uEEBsGeaz9rBNp09nEKoOvnVvAkqLddydtA3XP9K0PddA10HtTBu0U9gAdovWBGJ6i753PqoA/frVNH1rU59fa10ASnqxyoZYmt3D1bhl9NXOPu+fUgOY5bA8kFBpDsM22VjSB5LZCGcs6YpgoXMVbSk5tRkqlt4QT/kgudL0Em+XulmJ0iVDbPcQ3HSYHbIUqG874RVSKi6QamjPA5DWkay7S+zoMWG6mJczKnbJa9JwiDxqtLYFKa6UjwCAxltvwJ/Prdp/xPA/vhfw==</diagram></mxfile>
Binary file not shown. Before size: 62 KiB. After size: 52 KiB.
@@ -107,7 +107,7 @@ Problem matchers can be used to scan a build's output to automatically surface

 ```bash
 echo "::add-matcher::eslint-compact-problem-matcher.json"
-echo "::remove-matcher::eslint-compact"
+echo "::remove-matcher owner=eslint-compact::"
 ```

 `add-matcher` takes a path to a Problem Matcher file
@@ -122,7 +122,7 @@ echo "::save-state name=FOO::foovalue"

 ### Log Level

-Finally, there are several commands to emit different levels of log output:
+There are several commands to emit different levels of log output:

 | log level | example usage |
 |---|---|

@@ -130,6 +130,29 @@ Finally, there are several commands to emit different levels of log output:
 | warning | `echo "::warning::My warning message"` |
 | error | `echo "::error::My error message"` |

+### Command Echoing
+By default, the echoing of commands to stdout only occurs if [Step Debugging is enabled](./action-debugging.md#How-to-Access-Step-Debug-Logs).
+
+You can enable or disable this for the current step by using the `echo` command.
+
+```bash
+echo "::echo::on"
+```
+
+You can also disable echoing.
+
+```bash
+echo "::echo::off"
+```
+
+This is wrapped by the core method:
+
+```javascript
+function setCommandEcho(enabled: boolean): void {}
+```
+
+The `add-mask`, `debug`, `warning` and `error` commands do not support echoing.
+
+### Command Prompt
+CMD processes the `"` character differently from other shells when echoing. In CMD, the above snippets should have the `"` characters removed in order to process correctly. For example, the set-output command would be:
+```cmd
+echo ::set-output name=FOO::foovalue
+```
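In JavaScript actions, the same toggle is available through `@actions/core`, which wraps the `echo` command shown above (a short sketch):

```js
const core = require('@actions/core')

core.setCommandEcho(true)  // equivalent to echo "::echo::on"
// ... commands issued here are echoed to stdout ...
core.setCommandEcho(false) // equivalent to echo "::echo::off"
```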
@@ -8,7 +8,7 @@ Note that a complete version of this action can be found at https://github.com/d

 ## Prerequisites

-This walkthrough assumes that you have gone through the basic [javascript action walkthrough](./javascript-action.md) and have a basic action set up. If not, we recommend you go through that first.
+This walkthrough assumes that you have gone through the basic [javascript action walkthrough](https://github.com/actions/javascript-action) and have a basic action set up. If not, we recommend you go through that first.

 ## Installing dependencies
@@ -159,7 +159,7 @@ run();

 ## Writing unit tests for your action

-Next, we're going to write a basic unit test for our action using jest. If you followed the [javascript walkthrough](./javascript-action.md), you should have a file `__tests__/main.test.ts` that runs tests when `npm test` is called. We're going to start by populating that with one test:
+Next, we're going to write a basic unit test for our action using jest. If you followed the [javascript walkthrough](https://github.com/actions/javascript-action), you should have a file `__tests__/main.test.ts` that runs tests when `npm test` is called. We're going to start by populating that with one test:

 ```ts
 const nock = require('nock');
@@ -1,13 +1,17 @@
 # Problem Matchers

 Problem Matchers are a way to scan the output of actions for a specified regex pattern and surface that information prominently in the UI. Both [GitHub Annotations](https://developer.github.com/v3/checks/runs/#annotations-object-1) and log file decorations are created when a match is detected.

+## Single Line Matchers
+
 Let's consider the ESLint compact output:

 ```
 badFile.js: line 50, col 11, Error - 'myVar' is defined but never used. (no-unused-vars)
 ```

 We can define a problem matcher in json that detects input in that format:

 ```json
 {
   "problemMatcher": [
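A complete matcher in that format might look like the following (a sketch: the `owner` value and the regex are illustrative, written against the sample line above, and are not taken verbatim from this diff):

```json
{
  "problemMatcher": [
    {
      "owner": "eslint-compact",
      "pattern": [
        {
          "regexp": "^(.+):\\sline\\s(\\d+),\\scol\\s(\\d+),\\s(Error|Warning|Info)\\s-\\s(.+)\\s\\((.+)\\)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "message": 5,
          "code": 6
        }
      ]
    }
  ]
}
```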
@@ -33,31 +37,34 @@ The following fields are available for problem matchers:

 ```
 {
-   owner: An ID field that can be used to remove or replace the problem matcher. **required**
+   owner: an ID field that can be used to remove or replace the problem matcher. **required**
    severity: indicates the default severity, either 'warning' or 'error' case-insensitive. Defaults to 'error'
    pattern: [
      {
-        regexp: The regex pattern that provides the groups to match against **required**
+        regexp: the regex pattern that provides the groups to match against **required**
         file: a group number containing the file name
         fromPath: a group number containing a filepath used to root the file (e.g. a project file)
         line: a group number containing the line number
         column: a group number containing the column information
         severity: a group number containing either 'warning' or 'error' case-insensitive. Defaults to `error`
         code: a group number containing the error code
         message: a group number containing the error message. **required** at least one pattern must set the message
-        loop: loops until a match is not found, only valid on the last pattern of a multipattern matcher
+        loop: whether to loop until a match is not found, only valid on the last pattern of a multipattern matcher
      }
    ]
 }
 ```

+## Multiline Matching
+Consider the following output:
+
+```
+test.js
+  1:0   error  Missing "use strict" statement                 strict
+  5:10  error  'addOne' is defined but never used             no-unused-vars
+✖ 2 problems (2 errors, 0 warnings)
+```
+
+The file name is printed once, yet multiple error lines are printed. The `loop` keyword provides a way to discover multiple errors in outputs.
+
+The eslint-stylish problem matcher defined below catches that output, and creates two annotations from it.
@@ -76,7 +83,7 @@ The eslint-stylish problem matcher defined below catches that output, and creates
 {
   // Matches the 2nd and 3rd line in the output
   "regexp": "^\\s+(\\d+):(\\d+)\\s+(error|warning|info)\\s+(.*)\\s\\s+(.*)$",
-  // File is carried through from above, so we definte the rest of the groups
+  // File is carried through from above, so we define the rest of the groups
   "line": 1,
   "column": 2,
   "severity": 3,
@@ -94,14 +101,29 @@ The first pattern matches the `test.js` line and records the file information.

 The second pattern loops through the remaining lines with `loop: true` until it fails to find a match, and surfaces these lines prominently in the UI.

 ## Adding and Removing Problem Matchers

 Problem Matchers are enabled and removed via the toolkit [commands](commands.md#problem-matchers).

+## Duplicate Problem Matchers
+
+Registering two problem matchers with the same owner will result in only the problem matcher registered last running.
+
+## Examples
+
+Some of the starter actions are already using problem matchers, for example:
+- [setup-node](https://github.com/actions/setup-node/tree/master/.github)
+- [setup-python](https://github.com/actions/setup-python/tree/master/.github)
+- [setup-go](https://github.com/actions/setup-go/tree/master/.github)
+- [setup-dotnet](https://github.com/actions/setup-dotnet/tree/master/.github)
+
+## Troubleshooting
+
+### Regular expression not matching
+
+Use ECMAScript regular expression syntax when testing patterns.
+
+### File property getting dropped
+
+[Enable debug logging](https://help.github.com/en/actions/configuring-and-managing-workflows/managing-a-workflow-run#enabling-debug-logging) to determine why the file is getting dropped.
+
+This usually happens when the file does not exist or is not under the workflow repo.
@@ -0,0 +1,10 @@
# Proxy Server Support

Self-hosted runners [can be configured](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners) to run behind a proxy server in enterprises.

For actions to **just work** behind a proxy server:

1. Use [tool-cache](https://github.com/actions/toolkit/tree/master/packages/tool-cache) version >= 1.3.1
2. Optionally use [actions/http-client](https://github.com/actions/http-client)

If you are using other http clients, refer to the [environment variables set by the runner](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners).
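As a concrete illustration, `actions/http-client` picks up the runner's standard proxy environment variables on its own, so action code needs no proxy-specific logic (a sketch; the URL and user-agent string are placeholders):

```js
const httpm = require('@actions/http-client')

async function run() {
  // The client honors the proxy env variables set on a self-hosted runner
  const http = new httpm.HttpClient('my-action')
  const response = await http.get('https://api.github.com')
  console.log(await response.readBody())
}

run()
```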
File diff suppressed because it is too large
package.json
@@ -2,6 +2,7 @@
   "name": "root",
   "private": true,
   "scripts": {
+    "audit-all": "lerna run audit-moderate",
     "bootstrap": "lerna bootstrap",
     "build": "lerna run tsc",
     "check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build -- -- --noEmit\"",
@@ -9,23 +10,23 @@
     "format-check": "prettier --check packages/**/*.ts",
     "lint": "eslint packages/**/*.ts",
     "new-package": "scripts/create-package",
-    "test": "jest",
-    "test-ci": "jest --testPathIgnorePatterns=\"<rootDir>/packages/exec/__tests__/exec.test.ts\""
+    "test": "jest --testTimeout 10000"
   },
   "devDependencies": {
     "@types/jest": "^24.0.11",
     "@types/node": "^11.13.5",
     "@types/signale": "^1.2.1",
-    "@typescript-eslint/parser": "^1.9.0",
+    "@typescript-eslint/parser": "^2.2.7",
     "concurrently": "^4.1.0",
     "eslint": "^5.16.0",
     "eslint-plugin-github": "^2.0.0",
     "eslint-plugin-jest": "^22.5.1",
-    "jest": "^24.7.1",
+    "flow-bin": "^0.115.0",
+    "jest": "^25.1.0",
     "jest-circus": "^24.7.1",
-    "lerna": "^3.13.3",
-    "prettier": "^1.17.0",
-    "ts-jest": "^24.0.2",
-    "typescript": "^3.6.2"
+    "lerna": "^3.18.4",
+    "prettier": "^1.19.1",
+    "ts-jest": "^25.4.0",
+    "typescript": "^3.7.4"
   }
 }
@@ -0,0 +1,30 @@
# Contributions

This package is used internally by the v2+ versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions, so it is important that major changes or updates get properly tested.

Any issues or feature requests that are related to the artifact actions should be filed in the appropriate repo.

A limited range of unit tests run as part of each PR when making changes to the artifact packages. For small contributions and fixes, they should be sufficient.

If making large changes, there are a few scenarios that should be tested:

- Uploading very large artifacts (large artifacts get compressed using gzip so compression/decompression must be tested)
- Uploading artifacts with lots of small files (each file is uploaded with its own HTTP call; timeouts and non-success HTTP responses can be expected so they must be properly handled)
- Uploading artifacts using a self-hosted runner (uploads and downloads behave differently due to extra latency)
- Downloading a single artifact (large and small; if lots of small files are part of an artifact, timeouts and non-success HTTP responses can be expected)
- Downloading all artifacts at once

Large architectural changes can impact upload/download performance, so it is important to separately run extra tests. We request that any large contributions/changes have extra detailed testing so we can verify performance and possible regressions.

It is not possible to run end-to-end tests for artifacts as part of a PR in this repo because certain env variables such as `ACTIONS_RUNTIME_URL` are only available from the context of an action as opposed to a shell script. These env variables are needed in order to make the necessary API calls.

# Testing

An easy way to test changes is to fork the artifact actions and to use `npm link` to test your changes.

1. Fork the [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact) repos
2. Clone the forks locally
3. With your local changes to the toolkit repo, type `npm link` after ensuring there are no errors when running `tsc`
4. In the locally cloned fork, type `npm link @actions/artifact`
5. Create a new release for your local fork using `tsc` and `npm run release` (this will create a new `dist/index.js` file using `@zeit/ncc`)
6. Commit and push your local changes, you will then be able to test your changes with your forked action
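Spelled out as shell commands, that flow looks roughly like this (a sketch; the directory names are assumptions):

```bash
# In your local toolkit clone: build and register the package
cd toolkit/packages/artifact
tsc
npm link

# In your locally cloned fork of upload-artifact
cd ../../../upload-artifact
npm link @actions/artifact
npm run release   # regenerates dist/index.js via @zeit/ncc

git commit -am "Test toolkit changes" && git push
```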
@@ -0,0 +1,208 @@
# `@actions/artifact`

## Usage

You can use this package to interact with the actions artifacts.
- [Upload an Artifact](#Upload-an-Artifact)
- [Download a Single Artifact](#Download-a-Single-Artifact)
- [Download All Artifacts](#Download-all-Artifacts)
- [Additional Documentation](#Additional-Documentation)
- [Contributions](#Contributions)

Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory.

## Upload an Artifact

Method Name: `uploadArtifact`

#### Inputs
- `name`
  - The name of the artifact that is being uploaded
  - Required
- `files`
  - A list of file paths that describe what should be uploaded as part of the artifact
  - If a path is provided that does not exist, an error will be thrown
  - Can be absolute or relative. Internally everything is normalized and resolved
  - Required
- `rootDirectory`
  - A file path that denotes the root directory of the files being uploaded. This path is used to strip the paths provided in `files` to control how they are uploaded and structured
  - If a file specified in `files` is not in the `rootDirectory`, an error will be thrown
  - Required
- `options`
  - Extra options that allow for the customization of the upload behavior
  - Optional

#### Available Options

- `continueOnError`
  - Indicates if the artifact upload should continue in the event a file fails to upload. If there is an error during upload, a partial artifact will always be created and available for download at the end. The `size` reported will be the amount of storage that the user or org will be charged for the partial artifact.
  - If set to `false`, and an error is encountered, all other uploads will stop and any files that were queued will not be attempted to be uploaded. The partial artifact available will only include files up until the failure.
  - If set to `true` and an error is encountered, the failed file will be skipped and ignored and all other queued files will be attempted to be uploaded. There will be an artifact available for download at the end with everything excluding the file that failed to upload
  - Optional, defaults to `true` if not specified

#### Example using Absolute File Paths

```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const files = [
    '/home/user/files/plz-upload/file1.txt',
    '/home/user/files/plz-upload/file2.txt',
    '/home/user/files/plz-upload/dir/file3.txt'
]
const rootDirectory = '/home/user/files/plz-upload'
const options = {
    continueOnError: true
}

const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
```

#### Example using Relative File Paths
```js
// Assuming the current working directory is /home/user/files/plz-upload
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const files = [
    'file1.txt',
    'file2.txt',
    'dir/file3.txt'
]

const rootDirectory = '.' // Also possible to use __dirname
const options = {
    continueOnError: false
}

const uploadResponse = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
```

#### Upload Result

The returned `UploadResponse` will contain the following information

- `artifactName`
  - The name of the artifact that was uploaded
- `artifactItems`
  - A list of all files that describe what is uploaded if there are no errors encountered. Usually this will be equal to the provided `files`, with the exception of empty directories (which are not uploaded)
- `size`
  - Total size of the artifact that was uploaded in bytes
- `failedItems`
  - A list of items that were not uploaded successfully (this will include queued items that were not uploaded if `continueOnError` is set to `false`). This is a subset of `artifactItems`

## Download a Single Artifact

Method Name: `downloadArtifact`

#### Inputs
- `name`
  - The name of the artifact to download
  - Required
- `path`
  - Path that denotes where the artifact will be downloaded to
  - Optional. Defaults to the GitHub workspace directory (`$GITHUB_WORKSPACE`) if not specified
- `options`
  - Extra options that allow for the customization of the download behavior
  - Optional

#### Available Options

- `createArtifactFolder`
  - Specifies if a folder (named after the artifact) is created for the artifact that is downloaded (contents are downloaded into this folder)
  - Optional. Defaults to `false` if not specified

#### Example

```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const path = 'some/directory'
const options = {
    createArtifactFolder: false
}

const downloadResponse = await artifactClient.downloadArtifact(artifactName, path, options)

// Post download, the directory structure will look like this
/some
    /directory
        /file1.txt
        /file2.txt
        /dir
            /file3.txt

// If createArtifactFolder is set to true, the directory structure will look like this
/some
    /directory
        /my-artifact
            /file1.txt
            /file2.txt
            /dir
                /file3.txt
```

#### Download Response

The returned `DownloadResponse` will contain the following information

- `artifactName`
  - The name of the artifact that was downloaded
- `downloadPath`
  - The full path to where the artifact was downloaded

## Download All Artifacts

Method Name: `downloadAllArtifacts`

#### Inputs
- `path`
  - Path that denotes where the artifacts will be downloaded to
  - Optional. Defaults to the GitHub workspace directory (`$GITHUB_WORKSPACE`) if not specified

```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create();
const downloadResponse = await artifactClient.downloadAllArtifacts();

// output result
for (const response of downloadResponse) {
    console.log(response.artifactName);
    console.log(response.downloadPath);
}
```

Because there are multiple artifacts, an extra directory (denoted by the name of the artifact) will be created for each artifact in the path. With 2 artifacts (`my-artifact-1` and `my-artifact-2`, for example) and the default path, the directory structure will be as follows:
```
/GITHUB_WORKSPACE
    /my-artifact-1
        /.. contents of my-artifact-1
    /my-artifact-2
        /.. contents of my-artifact-2
```

#### Download Result

An array will be returned that describes the results for downloading all artifacts. The number of items in the array indicates the number of artifacts that were downloaded.

Each artifact will have the same `DownloadResponse` as if it was individually downloaded
- `artifactName`
  - The name of the artifact that was downloaded
- `downloadPath`
  - The full path to where the artifact was downloaded

## Additional Documentation

Check out [additional-information](docs/additional-information.md) for extra documentation around usage, restrictions and behavior.

Check out [implementation-details](docs/implementation-details.md) for extra information about the implementation of this package.

## Contributions

See [contributor guidelines](https://github.com/actions/toolkit/blob/master/.github/CONTRIBUTING.md) for general guidelines and information about toolkit contributions.

For contributions related to this package, see [artifact contributions](CONTRIBUTIONS.md) for more information.
@@ -0,0 +1,26 @@
# @actions/artifact Releases

### 0.1.0

- Initial release

### 0.2.0

- Fixes to TCP connections not closing
- GZip file compression to speed up downloads
- Improved logging and output
- Extra documentation

### 0.3.0

- Fixes to gzip decompression when downloading artifacts
- Support handling 429 response codes
- Improved download experience when dealing with empty files
- Exponential backoff when retryable status codes are encountered
- Clearer error message if storage quota has been reached
- Improved logging and output during artifact download

### 0.3.1

- Fix to ensure temporary gzip files get correctly deleted during artifact upload
- Remove spaces as a forbidden character during upload
@ -0,0 +1,5 @@
name: 'Set env variables'
description: 'Sets certain env variables so that e2e artifact upload and download can be tested in a shell'
runs:
  using: 'node12'
  main: 'index.js'

@ -0,0 +1,5 @@
// Certain env variables are not set by default in a shell context and are only available in a node context from a running action
// In order to be able to upload and download artifacts e2e in a shell when running CI tests, we need these env variables set
console.log(`::set-env name=ACTIONS_RUNTIME_URL::${process.env.ACTIONS_RUNTIME_URL}`)
console.log(`::set-env name=ACTIONS_RUNTIME_TOKEN::${process.env.ACTIONS_RUNTIME_TOKEN}`)
console.log(`::set-env name=GITHUB_RUN_ID::${process.env.GITHUB_RUN_ID}`)

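As a hedged sketch of the other half of this setup: once the script above has printed those `set-env` workflow commands, a later step in the same job (even a plain shell step) sees the values as ordinary environment variables. The step below is hypothetical and only illustrates the idea:

```js
// Hypothetical follow-up CI step run with `node` from a shell:
// the variables exported above should now be plain env variables.
for (const name of ['ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN', 'GITHUB_RUN_ID']) {
  if (!process.env[name]) {
    throw new Error(`${name} was not set; did the set-env action run first?`)
  }
}
console.log('artifact e2e env variables are present')
```
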
@ -0,0 +1,552 @@
import * as path from 'path'
import * as core from '@actions/core'
import {URL} from 'url'
import {getDownloadSpecification} from '../src/internal/download-specification'
import {ContainerEntry} from '../src/internal/contracts'

const artifact1Name = 'my-artifact'
const artifact2Name = 'my-artifact-extra'

// Populating with only the information that is necessary
function getPartialContainerEntry(): ContainerEntry {
  return {
    containerId: 10,
    scopeIdentifier: '00000000-0000-0000-0000-000000000000',
    path: 'ADD_INFORMATION',
    itemType: 'ADD_INFORMATION',
    status: 'created',
    dateCreated: '2020-02-06T22:13:35.373Z',
    dateLastModified: '2020-02-06T22:13:35.453Z',
    createdBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
    lastModifiedBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
    itemLocation: 'ADD_INFORMATION',
    contentLocation: 'ADD_INFORMATION',
    contentId: '',
    fileLength: 100
  }
}

function createFileEntry(entryPath: string): ContainerEntry {
  const newFileEntry = getPartialContainerEntry()
  newFileEntry.path = entryPath
  newFileEntry.itemType = 'file'
  newFileEntry.itemLocation = createItemLocation(entryPath)
  newFileEntry.contentLocation = createContentLocation(entryPath)
  return newFileEntry
}

function createDirectoryEntry(directoryPath: string): ContainerEntry {
  const newDirectoryEntry = getPartialContainerEntry()
  newDirectoryEntry.path = directoryPath
  newDirectoryEntry.itemType = 'folder'
  newDirectoryEntry.itemLocation = createItemLocation(directoryPath)
  newDirectoryEntry.contentLocation = createContentLocation(directoryPath)
  return newDirectoryEntry
}

function createItemLocation(relativePath: string): string {
  const itemLocation = new URL(
    'https://testing/_apis/resources/Containers/10000'
  )
  itemLocation.searchParams.append('itemPath', relativePath)
  itemLocation.searchParams.append('metadata', 'true')
  return itemLocation.toString()
}

function createContentLocation(relativePath: string): string {
  const itemLocation = new URL(
    'https://testing/_apis/resources/Containers/10000'
  )
  itemLocation.searchParams.append('itemPath', relativePath)
  return itemLocation.toString()
}

/*
  Represents a set of container entries for two artifacts with the following directory structure

  /my-artifact
    /file1.txt
    /file2.txt
    /dir1
      /file3.txt
      /dir2
        /dir3
          /dir4
            file4.txt
            file5.txt (no length property)
            file6.txt (empty file)

  /my-artifact-extra
    /file1.txt
*/

// main artifact
const file1Path = path.join(artifact1Name, 'file1.txt')
const file2Path = path.join(artifact1Name, 'file2.txt')
const dir1Path = path.join(artifact1Name, 'dir1')
const file3Path = path.join(dir1Path, 'file3.txt')
const dir2Path = path.join(dir1Path, 'dir2')
const dir3Path = path.join(dir2Path, 'dir3')
const dir4Path = path.join(dir3Path, 'dir4')
const file4Path = path.join(dir4Path, 'file4.txt')
const file5Path = path.join(dir4Path, 'file5.txt')
const file6Path = path.join(dir4Path, 'file6.txt')

const rootDirectoryEntry = createDirectoryEntry(artifact1Name)
const directoryEntry1 = createDirectoryEntry(dir1Path)
const directoryEntry2 = createDirectoryEntry(dir2Path)
const directoryEntry3 = createDirectoryEntry(dir3Path)
const directoryEntry4 = createDirectoryEntry(dir4Path)
const fileEntry1 = createFileEntry(file1Path)
const fileEntry2 = createFileEntry(file2Path)
const fileEntry3 = createFileEntry(file3Path)
const fileEntry4 = createFileEntry(file4Path)

const missingLengthFileEntry = createFileEntry(file5Path)
missingLengthFileEntry.fileLength = undefined // one file does not have a fileLength
const emptyLengthFileEntry = createFileEntry(file6Path)
emptyLengthFileEntry.fileLength = 0 // empty file path

// extra artifact
const artifact2File1Path = path.join(artifact2Name, 'file1.txt')
const rootDirectoryEntry2 = createDirectoryEntry(artifact2Name)
const extraFileEntry = createFileEntry(artifact2File1Path)

const artifactContainerEntries: ContainerEntry[] = [
  rootDirectoryEntry,
  fileEntry1,
  fileEntry2,
  directoryEntry1,
  fileEntry3,
  directoryEntry2,
  directoryEntry3,
  directoryEntry4,
  fileEntry4,
  missingLengthFileEntry,
  emptyLengthFileEntry,
  rootDirectoryEntry2,
  extraFileEntry
]

describe('Search', () => {
  beforeAll(async () => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
  })

  it('Download Specification - Absolute Path with no root directory', () => {
    const testDownloadPath = path.join(
      __dirname,
      'some',
      'destination',
      'folder'
    )

    const specification = getDownloadSpecification(
      artifact1Name,
      artifactContainerEntries,
      testDownloadPath,
      false
    )

    expect(specification.rootDownloadLocation).toEqual(testDownloadPath)
    expect(specification.filesToDownload.length).toEqual(5)

    const item1ExpectedTargetPath = path.join(testDownloadPath, 'file1.txt')
    const item2ExpectedTargetPath = path.join(testDownloadPath, 'file2.txt')
    const item3ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'file3.txt'
    )
    const item4ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file4.txt'
    )
    const item5ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file5.txt'
    )
    const item6ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file6.txt'
    )

    const targetLocations = specification.filesToDownload.map(
      item => item.targetPath
    )
    expect(targetLocations).toContain(item1ExpectedTargetPath)
    expect(targetLocations).toContain(item2ExpectedTargetPath)
    expect(targetLocations).toContain(item3ExpectedTargetPath)
    expect(targetLocations).toContain(item4ExpectedTargetPath)
    expect(targetLocations).toContain(item5ExpectedTargetPath)

    for (const downloadItem of specification.filesToDownload) {
      if (downloadItem.targetPath === item1ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file1Path)
        )
      } else if (downloadItem.targetPath === item2ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file2Path)
        )
      } else if (downloadItem.targetPath === item3ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file3Path)
        )
      } else if (downloadItem.targetPath === item4ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file4Path)
        )
      } else if (downloadItem.targetPath === item5ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file5Path)
        )
      } else {
        throw new Error('this should never be reached')
      }
    }

    expect(specification.directoryStructure.length).toEqual(3)
    expect(specification.directoryStructure).toContain(testDownloadPath)
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, 'dir1')
    )
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, 'dir1', 'dir2', 'dir3', 'dir4')
    )

    expect(specification.emptyFilesToCreate.length).toEqual(1)
    expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
  })

  it('Download Specification - Relative Path with no root directory', () => {
    const testDownloadPath = path.join('some', 'destination', 'folder')

    const specification = getDownloadSpecification(
      artifact1Name,
      artifactContainerEntries,
      testDownloadPath,
      false
    )

    expect(specification.rootDownloadLocation).toEqual(testDownloadPath)
    expect(specification.filesToDownload.length).toEqual(5)

    const item1ExpectedTargetPath = path.join(testDownloadPath, 'file1.txt')
    const item2ExpectedTargetPath = path.join(testDownloadPath, 'file2.txt')
    const item3ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'file3.txt'
    )
    const item4ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file4.txt'
    )
    const item5ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file5.txt'
    )
    const item6ExpectedTargetPath = path.join(
      testDownloadPath,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file6.txt'
    )

    const targetLocations = specification.filesToDownload.map(
      item => item.targetPath
    )
    expect(targetLocations).toContain(item1ExpectedTargetPath)
    expect(targetLocations).toContain(item2ExpectedTargetPath)
    expect(targetLocations).toContain(item3ExpectedTargetPath)
    expect(targetLocations).toContain(item4ExpectedTargetPath)
    expect(targetLocations).toContain(item5ExpectedTargetPath)

    for (const downloadItem of specification.filesToDownload) {
      if (downloadItem.targetPath === item1ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file1Path)
        )
      } else if (downloadItem.targetPath === item2ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file2Path)
        )
      } else if (downloadItem.targetPath === item3ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file3Path)
        )
      } else if (downloadItem.targetPath === item4ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file4Path)
        )
      } else if (downloadItem.targetPath === item5ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file5Path)
        )
      } else {
        throw new Error('this should never be reached')
      }
    }

    expect(specification.directoryStructure.length).toEqual(3)
    expect(specification.directoryStructure).toContain(testDownloadPath)
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, 'dir1')
    )
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, 'dir1', 'dir2', 'dir3', 'dir4')
    )

    expect(specification.emptyFilesToCreate.length).toEqual(1)
    expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
  })

  it('Download Specification - Absolute Path with root directory', () => {
    const testDownloadPath = path.join(
      __dirname,
      'some',
      'destination',
      'folder'
    )

    const specification = getDownloadSpecification(
      artifact1Name,
      artifactContainerEntries,
      testDownloadPath,
      true
    )

    expect(specification.rootDownloadLocation).toEqual(
      path.join(testDownloadPath, artifact1Name)
    )
    expect(specification.filesToDownload.length).toEqual(5)

    const item1ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'file1.txt'
    )
    const item2ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'file2.txt'
    )
    const item3ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'file3.txt'
    )
    const item4ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file4.txt'
    )
    const item5ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file5.txt'
    )
    const item6ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file6.txt'
    )

    const targetLocations = specification.filesToDownload.map(
      item => item.targetPath
    )
    expect(targetLocations).toContain(item1ExpectedTargetPath)
    expect(targetLocations).toContain(item2ExpectedTargetPath)
    expect(targetLocations).toContain(item3ExpectedTargetPath)
    expect(targetLocations).toContain(item4ExpectedTargetPath)
    expect(targetLocations).toContain(item5ExpectedTargetPath)

    for (const downloadItem of specification.filesToDownload) {
      if (downloadItem.targetPath === item1ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file1Path)
        )
      } else if (downloadItem.targetPath === item2ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file2Path)
        )
      } else if (downloadItem.targetPath === item3ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file3Path)
        )
      } else if (downloadItem.targetPath === item4ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file4Path)
        )
      } else if (downloadItem.targetPath === item5ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file5Path)
        )
      } else {
        throw new Error('this should never be reached')
      }
    }

    expect(specification.directoryStructure.length).toEqual(3)
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, artifact1Name)
    )
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, dir1Path)
    )
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, dir4Path)
    )

    expect(specification.emptyFilesToCreate.length).toEqual(1)
    expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
  })

  it('Download Specification - Relative Path with root directory', () => {
    const testDownloadPath = path.join('some', 'destination', 'folder')

    const specification = getDownloadSpecification(
      artifact1Name,
      artifactContainerEntries,
      testDownloadPath,
      true
    )

    expect(specification.rootDownloadLocation).toEqual(
      path.join(testDownloadPath, artifact1Name)
    )
    expect(specification.filesToDownload.length).toEqual(5)

    const item1ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'file1.txt'
    )
    const item2ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'file2.txt'
    )
    const item3ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'file3.txt'
    )
    const item4ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file4.txt'
    )
    const item5ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file5.txt'
    )
    const item6ExpectedTargetPath = path.join(
      testDownloadPath,
      artifact1Name,
      'dir1',
      'dir2',
      'dir3',
      'dir4',
      'file6.txt'
    )

    const targetLocations = specification.filesToDownload.map(
      item => item.targetPath
    )
    expect(targetLocations).toContain(item1ExpectedTargetPath)
    expect(targetLocations).toContain(item2ExpectedTargetPath)
    expect(targetLocations).toContain(item3ExpectedTargetPath)
    expect(targetLocations).toContain(item4ExpectedTargetPath)
    expect(targetLocations).toContain(item5ExpectedTargetPath)

    for (const downloadItem of specification.filesToDownload) {
      if (downloadItem.targetPath === item1ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file1Path)
        )
      } else if (downloadItem.targetPath === item2ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file2Path)
        )
      } else if (downloadItem.targetPath === item3ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file3Path)
        )
      } else if (downloadItem.targetPath === item4ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file4Path)
        )
      } else if (downloadItem.targetPath === item5ExpectedTargetPath) {
        expect(downloadItem.sourceLocation).toEqual(
          createContentLocation(file5Path)
        )
      } else {
        throw new Error('this should never be reached')
      }
    }

    expect(specification.directoryStructure.length).toEqual(3)
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, artifact1Name)
    )
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, dir1Path)
    )
    expect(specification.directoryStructure).toContain(
      path.join(testDownloadPath, dir4Path)
    )

    expect(specification.emptyFilesToCreate.length).toEqual(1)
    expect(specification.emptyFilesToCreate).toContain(item6ExpectedTargetPath)
  })
})

@ -0,0 +1,350 @@
import * as core from '@actions/core'
import * as http from 'http'
import * as io from '../../io/src/io'
import * as net from 'net'
import * as path from 'path'
import * as configVariables from '../src/internal/config-variables'
import {promises as fs} from 'fs'
import {DownloadItem} from '../src/internal/download-specification'
import {HttpClient, HttpClientResponse} from '@actions/http-client'
import {DownloadHttpClient} from '../src/internal/download-http-client'
import {
  ListArtifactsResponse,
  QueryArtifactResponse
} from '../src/internal/contracts'

const root = path.join(__dirname, '_temp', 'artifact-download-tests')

jest.mock('../src/internal/config-variables')
jest.mock('@actions/http-client')

describe('Download Tests', () => {
  beforeAll(async () => {
    await io.rmRF(root)
    await fs.mkdir(path.join(root), {
      recursive: true
    })

    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
    jest.spyOn(core, 'error').mockImplementation(() => {})
  })

  /**
   * Test Listing Artifacts
   */
  it('List Artifacts - Success', async () => {
    setupSuccessfulListArtifactsResponse()
    const downloadHttpClient = new DownloadHttpClient()
    const artifacts = await downloadHttpClient.listArtifacts()
    expect(artifacts.count).toEqual(2)

    const artifactNames = artifacts.value.map(item => item.name)
    expect(artifactNames).toContain('artifact1-name')
    expect(artifactNames).toContain('artifact2-name')

    for (const artifact of artifacts.value) {
      if (artifact.name === 'artifact1-name') {
        expect(artifact.url).toEqual(
          `${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact1-name`
        )
      } else if (artifact.name === 'artifact2-name') {
        expect(artifact.url).toEqual(
          `${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact2-name`
        )
      } else {
        throw new Error(
          'Invalid artifact combination. This should never be reached'
        )
      }
    }
  })

  it('List Artifacts - Failure', async () => {
    setupFailedResponse()
    const downloadHttpClient = new DownloadHttpClient()
    expect(downloadHttpClient.listArtifacts()).rejects.toThrow(
      'Unable to list artifacts for the run'
    )
  })

  /**
   * Test Container Items
   */
  it('Container Items - Success', async () => {
    setupSuccessfulContainerItemsResponse()
    const downloadHttpClient = new DownloadHttpClient()
    const response = await downloadHttpClient.getContainerItems(
      'artifact-name',
      configVariables.getRuntimeUrl()
    )
    expect(response.count).toEqual(2)

    const itemPaths = response.value.map(item => item.path)
    expect(itemPaths).toContain('artifact-name')
    expect(itemPaths).toContain('artifact-name/file1.txt')

    for (const containerEntry of response.value) {
      if (containerEntry.path === 'artifact-name') {
        expect(containerEntry.itemType).toEqual('folder')
      } else if (containerEntry.path === 'artifact-name/file1.txt') {
        expect(containerEntry.itemType).toEqual('file')
      } else {
        throw new Error(
          'Invalid container combination. This should never be reached'
        )
      }
    }
  })

  it('Container Items - Failure', async () => {
    setupFailedResponse()
    const downloadHttpClient = new DownloadHttpClient()
    expect(
      downloadHttpClient.getContainerItems(
        'artifact-name',
        configVariables.getRuntimeUrl()
      )
    ).rejects.toThrow(
      `Unable to get ContainersItems from ${configVariables.getRuntimeUrl()}`
    )
  })

  it('Test downloading an individual artifact with gzip', async () => {
    setupDownloadItemResponse(true, 200)
    const downloadHttpClient = new DownloadHttpClient()

    const items: DownloadItem[] = []
    items.push({
      sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileA.txt`,
      targetPath: path.join(root, 'FileA.txt')
    })

    await expect(
      downloadHttpClient.downloadSingleArtifact(items)
    ).resolves.not.toThrow()
  })

  it('Test downloading an individual artifact without gzip', async () => {
    setupDownloadItemResponse(false, 200)
    const downloadHttpClient = new DownloadHttpClient()

    const items: DownloadItem[] = []
    items.push({
      sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileB.txt`,
      targetPath: path.join(root, 'FileB.txt')
    })

    await expect(
      downloadHttpClient.downloadSingleArtifact(items)
    ).resolves.not.toThrow()
  })

  it('Test retryable status codes during artifact download', async () => {
    // The first http response should return a retryable status code while the subsequent call should return a 200 so
    // the download should successfully finish
    const retryableStatusCodes = [429, 502, 503, 504]
    for (const statusCode of retryableStatusCodes) {
      setupDownloadItemResponse(false, statusCode)
      const downloadHttpClient = new DownloadHttpClient()

      const items: DownloadItem[] = []
      items.push({
        sourceLocation: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13?itemPath=my-artifact%2FFileC.txt`,
        targetPath: path.join(root, 'FileC.txt')
      })

      await expect(
        downloadHttpClient.downloadSingleArtifact(items)
      ).resolves.not.toThrow()
    }
  })

  /**
   * Helper used to set up mocking for the HttpClient
   */
  async function emptyMockReadBody(): Promise<string> {
    return new Promise(resolve => {
      resolve()
    })
  }

  /**
   * Sets up HTTP GET response for a successful listArtifacts() call
   */
  function setupSuccessfulListArtifactsResponse(): void {
    jest.spyOn(HttpClient.prototype, 'get').mockImplementationOnce(async () => {
      const mockMessage = new http.IncomingMessage(new net.Socket())
      let mockReadBody = emptyMockReadBody

      mockMessage.statusCode = 201
      const response: ListArtifactsResponse = {
        count: 2,
        value: [
          {
            containerId: '13',
            size: -1,
            signedContent: 'false',
            fileContainerResourceUrl: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13`,
            type: 'actions_storage',
            name: 'artifact1-name',
            url: `${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact1-name`
          },
          {
            containerId: '13',
            size: -1,
            signedContent: 'false',
            fileContainerResourceUrl: `${configVariables.getRuntimeUrl()}_apis/resources/Containers/13`,
            type: 'actions_storage',
            name: 'artifact2-name',
            url: `${configVariables.getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=artifact2-name`
          }
        ]
      }
      const returnData: string = JSON.stringify(response, null, 2)
      mockReadBody = async function(): Promise<string> {
        return new Promise(resolve => {
          resolve(returnData)
        })
      }

      return new Promise<HttpClientResponse>(resolve => {
        resolve({
          message: mockMessage,
          readBody: mockReadBody
        })
      })
    })
  }

  /**
   * Sets up HTTP GET response for downloading items
   * @param isGzip is the downloaded item gzip encoded
   * @param firstHttpResponseCode the http response code that should be returned
   */
  function setupDownloadItemResponse(
    isGzip: boolean,
    firstHttpResponseCode: number
  ): void {
    jest
      .spyOn(DownloadHttpClient.prototype, 'pipeResponseToFile')
      .mockImplementationOnce(async () => {
        return new Promise<void>(resolve => {
          resolve()
        })
      })

    jest
      .spyOn(HttpClient.prototype, 'get')
      .mockImplementationOnce(async () => {
        const mockMessage = new http.IncomingMessage(new net.Socket())
        mockMessage.statusCode = firstHttpResponseCode
        if (isGzip) {
          mockMessage.headers = {
            'content-type': 'gzip'
          }
        }

        return new Promise<HttpClientResponse>(resolve => {
          resolve({
            message: mockMessage,
            readBody: emptyMockReadBody
          })
        })
      })
      .mockImplementationOnce(async () => {
        // chained response, if the HTTP GET function gets called again, return a successful response
        const mockMessage = new http.IncomingMessage(new net.Socket())
        mockMessage.statusCode = 200
        if (isGzip) {
          mockMessage.headers = {
            'content-type': 'gzip'
          }
        }

        return new Promise<HttpClientResponse>(resolve => {
          resolve({
            message: mockMessage,
            readBody: emptyMockReadBody
          })
        })
      })
  }

  /**
   * Sets up HTTP GET response when querying for container items
   */
  function setupSuccessfulContainerItemsResponse(): void {
    jest.spyOn(HttpClient.prototype, 'get').mockImplementationOnce(async () => {
      const mockMessage = new http.IncomingMessage(new net.Socket())
      let mockReadBody = emptyMockReadBody

      mockMessage.statusCode = 201
      const response: QueryArtifactResponse = {
        count: 2,
        value: [
          {
            containerId: 10000,
            scopeIdentifier: '00000000-0000-0000-0000-000000000000',
            path: 'artifact-name',
            itemType: 'folder',
            status: 'created',
            dateCreated: '2020-02-06T22:13:35.373Z',
            dateLastModified: '2020-02-06T22:13:35.453Z',
            createdBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
            lastModifiedBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
            itemLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name&metadata=True`,
            contentLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name`,
            contentId: ''
          },
          {
            containerId: 10000,
            scopeIdentifier: '00000000-0000-0000-0000-000000000000',
            path: 'artifact-name/file1.txt',
            itemType: 'file',
            status: 'created',
            dateCreated: '2020-02-06T22:13:35.373Z',
            dateLastModified: '2020-02-06T22:13:35.453Z',
            createdBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
            lastModifiedBy: '82f0bf89-6e55-4e5a-b8b6-f75eb992578c',
            itemLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name%2Ffile1.txt&metadata=True`,
            contentLocation: `${configVariables.getRuntimeUrl()}/_apis/resources/Containers/10000?itemPath=artifact-name%2Ffile1.txt`,
            contentId: ''
          }
        ]
      }
      const returnData: string = JSON.stringify(response, null, 2)
      mockReadBody = async function(): Promise<string> {
        return new Promise(resolve => {
          resolve(returnData)
        })
      }

      return new Promise<HttpClientResponse>(resolve => {
        resolve({
          message: mockMessage,
          readBody: mockReadBody
        })
      })
    })
  }

  /**
   * Sets up HTTP GET response for a generic failed request
   */
  function setupFailedResponse(): void {
    jest.spyOn(HttpClient.prototype, 'get').mockImplementationOnce(async () => {
      const mockMessage = new http.IncomingMessage(new net.Socket())
      mockMessage.statusCode = 500
      return new Promise<HttpClientResponse>(resolve => {
        resolve({
          message: mockMessage,
          readBody: emptyMockReadBody
        })
      })
    })
  }
})

@ -0,0 +1,353 @@
import * as io from '../../io/src/io'
import * as path from 'path'
import {promises as fs} from 'fs'
import * as core from '@actions/core'
import {getUploadSpecification} from '../src/internal/upload-specification'

const artifactName = 'my-artifact'
const root = path.join(__dirname, '_temp', 'upload-specification')
const goodItem1Path = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'good-item1.txt'
)
const goodItem2Path = path.join(root, 'folder-d', 'good-item2.txt')
const goodItem3Path = path.join(root, 'folder-d', 'good-item3.txt')
const goodItem4Path = path.join(root, 'folder-d', 'good-item4.txt')
const goodItem5Path = path.join(root, 'good-item5.txt')
const badItem1Path = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'bad-item1.txt'
)
const badItem2Path = path.join(root, 'folder-d', 'bad-item2.txt')
const badItem3Path = path.join(root, 'folder-f', 'bad-item3.txt')
const badItem4Path = path.join(root, 'folder-h', 'folder-i', 'bad-item4.txt')
const badItem5Path = path.join(root, 'folder-h', 'folder-i', 'bad-item5.txt')
const extraFileInFolderCPath = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'extra-file-in-folder-c.txt'
)
const amazingFileInFolderHPath = path.join(root, 'folder-h', 'amazing-item.txt')

const artifactFilesToUpload = [
  goodItem1Path,
  goodItem2Path,
  goodItem3Path,
  goodItem4Path,
  goodItem5Path,
  extraFileInFolderCPath,
  amazingFileInFolderHPath
]

describe('Search', () => {
  beforeAll(async () => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})

    // clear temp directory
    await io.rmRF(root)
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-c'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-e'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-d'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-f'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-g'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-h', 'folder-i'), {
      recursive: true
    })

    await fs.writeFile(goodItem1Path, 'good item1 file')
    await fs.writeFile(goodItem2Path, 'good item2 file')
    await fs.writeFile(goodItem3Path, 'good item3 file')
    await fs.writeFile(goodItem4Path, 'good item4 file')
    await fs.writeFile(goodItem5Path, 'good item5 file')

    await fs.writeFile(badItem1Path, 'bad item1 file')
    await fs.writeFile(badItem2Path, 'bad item2 file')
    await fs.writeFile(badItem3Path, 'bad item3 file')
    await fs.writeFile(badItem4Path, 'bad item4 file')
    await fs.writeFile(badItem5Path, 'bad item5 file')

    await fs.writeFile(extraFileInFolderCPath, 'extra file')

    await fs.writeFile(amazingFileInFolderHPath, 'amazing file')
    /*
      Directory structure of files that get created:
      root/
        folder-a/
          folder-b/
            folder-c/
              good-item1.txt
              bad-item1.txt
              extra-file-in-folder-c.txt
            folder-e/
        folder-d/
          good-item2.txt
          good-item3.txt
          good-item4.txt
          bad-item2.txt
        folder-f/
          bad-item3.txt
        folder-g/
        folder-h/
          amazing-item.txt
          folder-i/
            bad-item4.txt
            bad-item5.txt
        good-item5.txt
    */
  })

  it('Upload Specification - Fail non-existent rootDirectory', async () => {
    const invalidRootDirectory = path.join(
      __dirname,
      '_temp',
      'upload-specification-invalid'
    )
    expect(() => {
      getUploadSpecification(
        artifactName,
        invalidRootDirectory,
        artifactFilesToUpload
      )
    }).toThrow(`Provided rootDirectory ${invalidRootDirectory} does not exist`)
  })

  it('Upload Specification - Fail invalid rootDirectory', async () => {
    expect(() => {
      getUploadSpecification(artifactName, goodItem1Path, artifactFilesToUpload)
    }).toThrow(
      `Provided rootDirectory ${goodItem1Path} is not a valid directory`
    )
  })

  it('Upload Specification - File does not exist', async () => {
    const fakeFilePath = path.join(
      artifactName,
      'folder-a',
      'folder-b',
      'non-existent-file.txt'
    )
    expect(() => {
      getUploadSpecification(artifactName, root, [fakeFilePath])
    }).toThrow(`File ${fakeFilePath} does not exist`)
  })

  it('Upload Specification - Non parent directory', async () => {
    const folderADirectory = path.join(root, 'folder-a')
    const artifactFiles = [
      goodItem1Path,
      badItem1Path,
      extraFileInFolderCPath,
      goodItem5Path
    ]
    expect(() => {
      getUploadSpecification(artifactName, folderADirectory, artifactFiles)
    }).toThrow(
      `The rootDirectory: ${folderADirectory} is not a parent directory of the file: ${goodItem5Path}`
    )
  })

  it('Upload Specification - Success', async () => {
    const specifications = getUploadSpecification(
      artifactName,
      root,
      artifactFilesToUpload
    )
    expect(specifications.length).toEqual(7)

    const absolutePaths = specifications.map(item => item.absoluteFilePath)
    expect(absolutePaths).toContain(goodItem1Path)
    expect(absolutePaths).toContain(goodItem2Path)
    expect(absolutePaths).toContain(goodItem3Path)
    expect(absolutePaths).toContain(goodItem4Path)
    expect(absolutePaths).toContain(goodItem5Path)
    expect(absolutePaths).toContain(extraFileInFolderCPath)
    expect(absolutePaths).toContain(amazingFileInFolderHPath)

    for (const specification of specifications) {
      if (specification.absoluteFilePath === goodItem1Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(
            artifactName,
            'folder-a',
            'folder-b',
            'folder-c',
            'good-item1.txt'
          )
        )
      } else if (specification.absoluteFilePath === goodItem2Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item2.txt')
        )
      } else if (specification.absoluteFilePath === goodItem3Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item3.txt')
        )
      } else if (specification.absoluteFilePath === goodItem4Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item4.txt')
        )
      } else if (specification.absoluteFilePath === goodItem5Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'good-item5.txt')
        )
      } else if (specification.absoluteFilePath === extraFileInFolderCPath) {
        expect(specification.uploadFilePath).toEqual(
          path.join(
            artifactName,
            'folder-a',
            'folder-b',
            'folder-c',
            'extra-file-in-folder-c.txt'
          )
        )
      } else if (specification.absoluteFilePath === amazingFileInFolderHPath) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-h', 'amazing-item.txt')
        )
      } else {
        throw new Error(
          'Invalid specification found. This should never be reached'
        )
      }
    }
  })

  it('Upload Specification - Success with extra slash', async () => {
    const rootWithSlash = `${root}/`
    const specifications = getUploadSpecification(
      artifactName,
      rootWithSlash,
      artifactFilesToUpload
    )
    expect(specifications.length).toEqual(7)

    const absolutePaths = specifications.map(item => item.absoluteFilePath)
    expect(absolutePaths).toContain(goodItem1Path)
    expect(absolutePaths).toContain(goodItem2Path)
    expect(absolutePaths).toContain(goodItem3Path)
    expect(absolutePaths).toContain(goodItem4Path)
    expect(absolutePaths).toContain(goodItem5Path)
    expect(absolutePaths).toContain(extraFileInFolderCPath)
    expect(absolutePaths).toContain(amazingFileInFolderHPath)

    for (const specification of specifications) {
      if (specification.absoluteFilePath === goodItem1Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(
            artifactName,
            'folder-a',
            'folder-b',
            'folder-c',
            'good-item1.txt'
          )
        )
      } else if (specification.absoluteFilePath === goodItem2Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item2.txt')
        )
      } else if (specification.absoluteFilePath === goodItem3Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item3.txt')
        )
      } else if (specification.absoluteFilePath === goodItem4Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item4.txt')
        )
      } else if (specification.absoluteFilePath === goodItem5Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'good-item5.txt')
        )
      } else if (specification.absoluteFilePath === extraFileInFolderCPath) {
        expect(specification.uploadFilePath).toEqual(
          path.join(
            artifactName,
            'folder-a',
            'folder-b',
            'folder-c',
            'extra-file-in-folder-c.txt'
          )
        )
      } else if (specification.absoluteFilePath === amazingFileInFolderHPath) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-h', 'amazing-item.txt')
        )
      } else {
        throw new Error(
          'Invalid specification found. This should never be reached'
        )
      }
    }
  })

  it('Upload Specification - Directories should not be included', async () => {
    const folderEPath = path.join(root, 'folder-a', 'folder-b', 'folder-e')
    const filesWithDirectory = [
      goodItem1Path,
      goodItem4Path,
      folderEPath,
      badItem3Path
    ]
    const specifications = getUploadSpecification(
      artifactName,
      root,
      filesWithDirectory
    )
    expect(specifications.length).toEqual(3)
    const absolutePaths = specifications.map(item => item.absoluteFilePath)
    expect(absolutePaths).toContain(goodItem1Path)
    expect(absolutePaths).toContain(goodItem4Path)
    expect(absolutePaths).toContain(badItem3Path)

    for (const specification of specifications) {
      if (specification.absoluteFilePath === goodItem1Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(
            artifactName,
            'folder-a',
            'folder-b',
            'folder-c',
            'good-item1.txt'
          )
        )
      } else if (specification.absoluteFilePath === goodItem2Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item2.txt')
        )
      } else if (specification.absoluteFilePath === goodItem4Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-d', 'good-item4.txt')
        )
      } else if (specification.absoluteFilePath === badItem3Path) {
        expect(specification.uploadFilePath).toEqual(
          path.join(artifactName, 'folder-f', 'bad-item3.txt')
        )
      } else {
        throw new Error(
          'Invalid specification found. This should never be reached'
        )
      }
    }
  })
})

@ -0,0 +1,480 @@
import * as http from 'http'
import * as io from '../../io/src/io'
import * as net from 'net'
import * as path from 'path'
import {UploadHttpClient} from '../src/internal/upload-http-client'
import * as core from '@actions/core'
import {promises as fs} from 'fs'
import {getRuntimeUrl} from '../src/internal/config-variables'
import {HttpClient, HttpClientResponse} from '@actions/http-client'
import {
  ArtifactResponse,
  PatchArtifactSizeSuccessResponse
} from '../src/internal/contracts'
import {UploadSpecification} from '../src/internal/upload-specification'
import {getArtifactUrl} from '../src/internal/utils'

const root = path.join(__dirname, '_temp', 'artifact-upload')
const file1Path = path.join(root, 'file1.txt')
const file2Path = path.join(root, 'file2.txt')
const file3Path = path.join(root, 'folder1', 'file3.txt')
const file4Path = path.join(root, 'folder1', 'file4.txt')
const file5Path = path.join(root, 'folder1', 'folder2', 'folder3', 'file5.txt')

let file1Size = 0
let file2Size = 0
let file3Size = 0
let file4Size = 0
let file5Size = 0

jest.mock('../src/internal/config-variables')
jest.mock('@actions/http-client')

describe('Upload Tests', () => {
  beforeAll(async () => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
    jest.spyOn(core, 'error').mockImplementation(() => {})

    // setup mocking for calls that go through the HttpClient
    setupHttpClientMock()

    // clear temp directory and create files that will be "uploaded"
    await io.rmRF(root)
    await fs.mkdir(path.join(root, 'folder1', 'folder2', 'folder3'), {
      recursive: true
    })
    await fs.writeFile(file1Path, 'this is file 1')
    await fs.writeFile(file2Path, 'this is file 2')
    await fs.writeFile(file3Path, 'this is file 3')
    await fs.writeFile(file4Path, 'this is file 4')
    await fs.writeFile(file5Path, 'this is file 5')
    /*
      Directory structure for files that get created:
      root/
        file1.txt
        file2.txt
        folder1/
          file3.txt
          file4.txt
          folder2/
            folder3/
              file5.txt
    */

    file1Size = (await fs.stat(file1Path)).size
    file2Size = (await fs.stat(file2Path)).size
    file3Size = (await fs.stat(file3Path)).size
    file4Size = (await fs.stat(file4Path)).size
    file5Size = (await fs.stat(file5Path)).size
  })

  /**
   * Artifact Creation Tests
   */
  it('Create Artifact - Success', async () => {
    const artifactName = 'valid-artifact-name'
    const uploadHttpClient = new UploadHttpClient()
    const response = await uploadHttpClient.createArtifactInFileContainer(
      artifactName
    )
    expect(response.containerId).toEqual('13')
    expect(response.size).toEqual(-1)
    expect(response.signedContent).toEqual('false')
    expect(response.fileContainerResourceUrl).toEqual(
      `${getRuntimeUrl()}_apis/resources/Containers/13`
    )
    expect(response.type).toEqual('actions_storage')
    expect(response.name).toEqual(artifactName)
    expect(response.url).toEqual(
      `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${artifactName}`
    )
  })

  it('Create Artifact - Failure', async () => {
    const artifactName = 'invalid-artifact-name'
    const uploadHttpClient = new UploadHttpClient()
    expect(
      uploadHttpClient.createArtifactInFileContainer(artifactName)
    ).rejects.toEqual(
      new Error(
        `Unable to create a container for the artifact invalid-artifact-name at ${getArtifactUrl()}`
      )
    )
  })

  it('Create Artifact - Storage Quota Error', async () => {
    const artifactName = 'storage-quota-hit'
    const uploadHttpClient = new UploadHttpClient()
    expect(
      uploadHttpClient.createArtifactInFileContainer(artifactName)
    ).rejects.toEqual(
      new Error(
        'Artifact storage quota has been hit. Unable to upload any new artifacts'
      )
    )
  })

  /**
   * Artifact Upload Tests
   */
  it('Upload Artifact - Success', async () => {
    /**
     * Normally search.findFilesToUpload() would be used for providing information about what to upload. These tests however
     * focus solely on the upload APIs so searchResult[] will be hard-coded
     */
    const artifactName = 'successful-artifact'
    const uploadSpecification: UploadSpecification[] = [
      {
        absoluteFilePath: file1Path,
        uploadFilePath: `${artifactName}/file1.txt`
      },
      {
        absoluteFilePath: file2Path,
        uploadFilePath: `${artifactName}/file2.txt`
      },
      {
        absoluteFilePath: file3Path,
        uploadFilePath: `${artifactName}/folder1/file3.txt`
      },
      {
        absoluteFilePath: file4Path,
        uploadFilePath: `${artifactName}/folder1/file4.txt`
      },
      {
        absoluteFilePath: file5Path,
        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
      }
    ]

    const expectedTotalSize =
      file1Size + file2Size + file3Size + file4Size + file5Size
    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
    const uploadHttpClient = new UploadHttpClient()
    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      uploadUrl,
      uploadSpecification
    )
    expect(uploadResult.failedItems.length).toEqual(0)
    expect(uploadResult.uploadSize).toEqual(expectedTotalSize)
  })

  it('Upload Artifact - Failed Single File Upload', async () => {
    const uploadSpecification: UploadSpecification[] = [
      {
        absoluteFilePath: file1Path,
        uploadFilePath: `this-file-upload-will-fail`
      }
    ]

    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
    const uploadHttpClient = new UploadHttpClient()
    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      uploadUrl,
      uploadSpecification
    )
    expect(uploadResult.failedItems.length).toEqual(1)
    expect(uploadResult.uploadSize).toEqual(0)
  })

  it('Upload Artifact - Partial Upload Continue On Error', async () => {
    const artifactName = 'partial-artifact'
    const uploadSpecification: UploadSpecification[] = [
      {
        absoluteFilePath: file1Path,
        uploadFilePath: `${artifactName}/file1.txt`
      },
      {
        absoluteFilePath: file2Path,
        uploadFilePath: `${artifactName}/file2.txt`
      },
      {
        absoluteFilePath: file3Path,
        uploadFilePath: `${artifactName}/folder1/file3.txt`
      },
      {
        absoluteFilePath: file4Path,
        uploadFilePath: `this-file-upload-will-fail`
      },
      {
        absoluteFilePath: file5Path,
        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
      }
    ]

    // file4 is the one that fails to upload, so its size is excluded from the expected total
    const expectedPartialSize = file1Size + file2Size + file3Size + file5Size
    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
    const uploadHttpClient = new UploadHttpClient()
    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      uploadUrl,
      uploadSpecification,
      {continueOnError: true}
    )
    expect(uploadResult.failedItems.length).toEqual(1)
    expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
  })

  it('Upload Artifact - Partial Upload Fail Fast', async () => {
    const artifactName = 'partial-artifact'
    const uploadSpecification: UploadSpecification[] = [
      {
        absoluteFilePath: file1Path,
        uploadFilePath: `${artifactName}/file1.txt`
      },
      {
        absoluteFilePath: file2Path,
        uploadFilePath: `${artifactName}/file2.txt`
      },
      {
        absoluteFilePath: file3Path,
        uploadFilePath: `${artifactName}/folder1/file3.txt`
      },
      {
        absoluteFilePath: file4Path,
        uploadFilePath: `this-file-upload-will-fail`
      },
      {
        absoluteFilePath: file5Path,
        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
      }
    ]

    const expectedPartialSize = file1Size + file2Size + file3Size
    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
    const uploadHttpClient = new UploadHttpClient()
    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      uploadUrl,
      uploadSpecification,
      {continueOnError: false}
    )
    expect(uploadResult.failedItems.length).toEqual(2)
    expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
  })

  it('Upload Artifact - Failed upload with no options', async () => {
    const artifactName = 'partial-artifact'
    const uploadSpecification: UploadSpecification[] = [
      {
        absoluteFilePath: file1Path,
        uploadFilePath: `${artifactName}/file1.txt`
      },
      {
        absoluteFilePath: file2Path,
        uploadFilePath: `${artifactName}/file2.txt`
      },
      {
        absoluteFilePath: file3Path,
        uploadFilePath: `${artifactName}/folder1/file3.txt`
      },
      {
        absoluteFilePath: file4Path,
        uploadFilePath: `this-file-upload-will-fail`
      },
      {
        absoluteFilePath: file5Path,
        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
      }
    ]

    const expectedPartialSize = file1Size + file2Size + file3Size + file5Size
    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
    const uploadHttpClient = new UploadHttpClient()
    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      uploadUrl,
      uploadSpecification
    )
    expect(uploadResult.failedItems.length).toEqual(1)
    expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
  })

  it('Upload Artifact - Failed upload with empty options', async () => {
    const artifactName = 'partial-artifact'
    const uploadSpecification: UploadSpecification[] = [
      {
        absoluteFilePath: file1Path,
        uploadFilePath: `${artifactName}/file1.txt`
      },
      {
        absoluteFilePath: file2Path,
        uploadFilePath: `${artifactName}/file2.txt`
      },
      {
        absoluteFilePath: file3Path,
        uploadFilePath: `${artifactName}/folder1/file3.txt`
      },
      {
        absoluteFilePath: file4Path,
        uploadFilePath: `this-file-upload-will-fail`
      },
      {
        absoluteFilePath: file5Path,
        uploadFilePath: `${artifactName}/folder1/folder2/folder3/file5.txt`
      }
    ]

    const expectedPartialSize = file1Size + file2Size + file3Size + file5Size
    const uploadUrl = `${getRuntimeUrl()}_apis/resources/Containers/13`
    const uploadHttpClient = new UploadHttpClient()
    const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
      uploadUrl,
      uploadSpecification,
      {}
    )
    expect(uploadResult.failedItems.length).toEqual(1)
    expect(uploadResult.uploadSize).toEqual(expectedPartialSize)
  })

  /**
   * Artifact Association Tests
   */
  it('Associate Artifact - Success', async () => {
    const uploadHttpClient = new UploadHttpClient()
    expect(async () => {
      uploadHttpClient.patchArtifactSize(130, 'my-artifact')
    }).not.toThrow()
  })

  it('Associate Artifact - Not Found', async () => {
    const uploadHttpClient = new UploadHttpClient()
    expect(
      uploadHttpClient.patchArtifactSize(100, 'non-existent-artifact')
    ).rejects.toThrow(
      'An Artifact with the name non-existent-artifact was not found'
    )
  })

  it('Associate Artifact - Error', async () => {
    const uploadHttpClient = new UploadHttpClient()
    expect(
      uploadHttpClient.patchArtifactSize(-2, 'my-artifact')
    ).rejects.toThrow('Unable to finish uploading artifact my-artifact')
  })

  /**
   * Helpers used to set up mocking for the HttpClient
   */
  async function emptyMockReadBody(): Promise<string> {
    return new Promise(resolve => {
      resolve()
    })
  }

  function setupHttpClientMock(): void {
    /**
     * Mocks Post calls that are used during Artifact Creation tests
     *
     * Simulates success and non-success status codes depending on the artifact name along with an appropriate
     * payload that represents an expected response
     */
    jest
      .spyOn(HttpClient.prototype, 'post')
      .mockImplementation(async (requestdata, data) => {
        // parse the input data and use the provided artifact name as part of the response
        const inputData = JSON.parse(data)
        const mockMessage = new http.IncomingMessage(new net.Socket())
        let mockReadBody = emptyMockReadBody

        if (inputData.Name === 'invalid-artifact-name') {
          mockMessage.statusCode = 400
        } else if (inputData.Name === 'storage-quota-hit') {
          mockMessage.statusCode = 403
        } else {
          mockMessage.statusCode = 201
          const response: ArtifactResponse = {
            containerId: '13',
            size: -1,
            signedContent: 'false',
            fileContainerResourceUrl: `${getRuntimeUrl()}_apis/resources/Containers/13`,
            type: 'actions_storage',
            name: inputData.Name,
            url: `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${
              inputData.Name
            }`
          }
          const returnData: string = JSON.stringify(response, null, 2)
          mockReadBody = async function(): Promise<string> {
            return new Promise(resolve => {
              resolve(returnData)
            })
          }
        }
        return new Promise<HttpClientResponse>(resolve => {
          resolve({
            message: mockMessage,
            readBody: mockReadBody
          })
        })
      })

    /**
     * Mocks SendStream calls that are made during Artifact Upload tests
     *
     * A 500 response is used to simulate a failed upload stream. The uploadUrl can be set to
     * include 'fail' to specify that the upload should fail
     */
    jest
      .spyOn(HttpClient.prototype, 'sendStream')
      .mockImplementation(async (verb, requestUrl) => {
        const mockMessage = new http.IncomingMessage(new net.Socket())
        mockMessage.statusCode = 200
        if (requestUrl.includes('fail')) {
          mockMessage.statusCode = 500
        }

        return new Promise<HttpClientResponse>(resolve => {
          resolve({
            message: mockMessage,
            readBody: emptyMockReadBody
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Mocks Patch calls that are made during Artifact Association tests
|
||||
*
|
||||
* Simulates success and non-success status codes depending on the input size along with an appropriate
|
||||
* payload that represents an expected response
|
||||
*/
|
||||
jest
|
||||
.spyOn(HttpClient.prototype, 'patch')
|
||||
.mockImplementation(async (requestdata, data) => {
|
||||
const inputData = JSON.parse(data)
|
||||
const mockMessage = new http.IncomingMessage(new net.Socket())
|
||||
|
||||
// Get the name from the end of requestdata. Will be something like https://www.example.com/_apis/pipelines/workflows/15/artifacts?api-version=6.0-preview&artifactName=my-artifact
|
||||
const artifactName = requestdata.split('=')[2]
|
||||
let mockReadBody = emptyMockReadBody
|
||||
if (inputData.Size < 1) {
|
||||
mockMessage.statusCode = 400
|
||||
} else if (artifactName === 'non-existent-artifact') {
|
||||
mockMessage.statusCode = 404
|
||||
} else {
|
||||
mockMessage.statusCode = 200
|
||||
const response: PatchArtifactSizeSuccessResponse = {
|
||||
containerId: 13,
|
||||
size: inputData.Size,
|
||||
signedContent: 'false',
|
||||
type: 'actions_storage',
|
||||
name: artifactName,
|
||||
url: `${getRuntimeUrl()}_apis/pipelines/1/runs/1/artifacts?artifactName=${artifactName}`,
|
||||
uploadUrl: `${getRuntimeUrl()}_apis/resources/Containers/13`
|
||||
}
|
||||
const returnData: string = JSON.stringify(response, null, 2)
|
||||
mockReadBody = async function(): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
resolve(returnData)
|
||||
})
|
||||
}
|
||||
}
|
||||
return new Promise<HttpClientResponse>(resolve => {
|
||||
resolve({
|
||||
message: mockMessage,
|
||||
readBody: mockReadBody
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
|
@ -0,0 +1,267 @@
import * as fs from 'fs'
import * as io from '../../io/src/io'
import * as path from 'path'
import * as utils from '../src/internal/utils'
import * as core from '@actions/core'
import {HttpCodes} from '@actions/http-client'
import {
  getRuntimeUrl,
  getWorkFlowRunId,
  getInitialRetryIntervalInMilliseconds,
  getRetryMultiplier
} from '../src/internal/config-variables'

jest.mock('../src/internal/config-variables')

describe('Utils', () => {
  beforeAll(() => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})
  })

  it('Check exponential retry range', () => {
    // No retries should return the initial retry interval
    const retryWaitTime0 = utils.getExponentialRetryTimeInMilliseconds(0)
    expect(retryWaitTime0).toEqual(getInitialRetryIntervalInMilliseconds())

    const testMinMaxRange = (retryCount: number): void => {
      const retryWaitTime = utils.getExponentialRetryTimeInMilliseconds(
        retryCount
      )
      const minRange =
        getInitialRetryIntervalInMilliseconds() *
        getRetryMultiplier() *
        retryCount
      const maxRange = minRange * getRetryMultiplier()

      expect(retryWaitTime).toBeGreaterThanOrEqual(minRange)
      expect(retryWaitTime).toBeLessThan(maxRange)
    }

    for (let i = 1; i < 10; i++) {
      testMinMaxRange(i)
    }
  })

  it('Check Artifact Name for any invalid characters', () => {
    const invalidNames = [
      'my\\artifact',
      'my/artifact',
      'my"artifact',
      'my:artifact',
      'my<artifact',
      'my>artifact',
      'my|artifact',
      'my*artifact',
      'my?artifact',
      ''
    ]
    for (const invalidName of invalidNames) {
      expect(() => {
        utils.checkArtifactName(invalidName)
      }).toThrow()
    }

    const validNames = [
      'my-normal-artifact',
      'myNormalArtifact',
      'm¥ñðrmålÄr†ï£å¢†'
    ]
    for (const validName of validNames) {
      expect(() => {
        utils.checkArtifactName(validName)
      }).not.toThrow()
    }
  })

  it('Check Artifact File Path for any invalid characters', () => {
    const invalidNames = [
      'some/invalid"artifact/path',
      'some/invalid:artifact/path',
      'some/invalid<artifact/path',
      'some/invalid>artifact/path',
      'some/invalid|artifact/path',
      'some/invalid*artifact/path',
      'some/invalid?artifact/path',
      ''
    ]
    for (const invalidName of invalidNames) {
      expect(() => {
        utils.checkArtifactFilePath(invalidName)
      }).toThrow()
    }

    const validNames = [
      'my/perfectly-normal/artifact-path',
      'my/perfectly\\Normal/Artifact-path',
      'm¥/ñðrmål/Är†ï£å¢†'
    ]
    for (const validName of validNames) {
      expect(() => {
        utils.checkArtifactFilePath(validName)
      }).not.toThrow()
    }
  })

  it('Test constructing artifact URL', () => {
    const runtimeUrl = getRuntimeUrl()
    const runId = getWorkFlowRunId()
    const artifactUrl = utils.getArtifactUrl()
    expect(artifactUrl).toEqual(
      `${runtimeUrl}_apis/pipelines/workflows/${runId}/artifacts?api-version=${utils.getApiVersion()}`
    )
  })

  it('Test constructing upload headers with all optional parameters', () => {
    const contentType = 'application/octet-stream'
    const size = 24
    const uncompressedLength = 100
    const range = 'bytes 0-199/200'
    const options = utils.getUploadRequestOptions(
      contentType,
      true,
      true,
      uncompressedLength,
      size,
      range
    )
    expect(Object.keys(options).length).toEqual(8)
    expect(options['Accept']).toEqual(
      `application/json;api-version=${utils.getApiVersion()}`
    )
    expect(options['Content-Type']).toEqual(contentType)
    expect(options['Connection']).toEqual('Keep-Alive')
    expect(options['Keep-Alive']).toEqual('10')
    expect(options['Content-Encoding']).toEqual('gzip')
    expect(options['x-tfs-filelength']).toEqual(uncompressedLength)
    expect(options['Content-Length']).toEqual(size)
    expect(options['Content-Range']).toEqual(range)
  })

  it('Test constructing upload headers with only required parameter', () => {
    const options = utils.getUploadRequestOptions('application/octet-stream')
    expect(Object.keys(options).length).toEqual(2)
    expect(options['Accept']).toEqual(
      `application/json;api-version=${utils.getApiVersion()}`
    )
    expect(options['Content-Type']).toEqual('application/octet-stream')
  })

  it('Test constructing download headers with all optional parameters', () => {
    const contentType = 'application/json'
    const options = utils.getDownloadRequestOptions(contentType, true, true)
    expect(Object.keys(options).length).toEqual(5)
    expect(options['Content-Type']).toEqual(contentType)
    expect(options['Connection']).toEqual('Keep-Alive')
    expect(options['Keep-Alive']).toEqual('10')
    expect(options['Accept-Encoding']).toEqual('gzip')
    expect(options['Accept']).toEqual(
      `application/octet-stream;api-version=${utils.getApiVersion()}`
    )
  })

  it('Test constructing download headers with only required parameter', () => {
    const options = utils.getDownloadRequestOptions('application/octet-stream')
    expect(Object.keys(options).length).toEqual(2)
    expect(options['Content-Type']).toEqual('application/octet-stream')
    // check for default accept type
    expect(options['Accept']).toEqual(
      `application/json;api-version=${utils.getApiVersion()}`
    )
  })

  it('Test Success Status Code', () => {
    expect(utils.isSuccessStatusCode(HttpCodes.OK)).toEqual(true)
    expect(utils.isSuccessStatusCode(201)).toEqual(true)
    expect(utils.isSuccessStatusCode(299)).toEqual(true)
    expect(utils.isSuccessStatusCode(HttpCodes.NotFound)).toEqual(false)
    expect(utils.isSuccessStatusCode(HttpCodes.BadGateway)).toEqual(false)
    expect(utils.isSuccessStatusCode(HttpCodes.Forbidden)).toEqual(false)
  })

  it('Test Retry Status Code', () => {
    expect(utils.isRetryableStatusCode(HttpCodes.BadGateway)).toEqual(true)
    expect(utils.isRetryableStatusCode(HttpCodes.ServiceUnavailable)).toEqual(
      true
    )
    expect(utils.isRetryableStatusCode(HttpCodes.GatewayTimeout)).toEqual(true)
    expect(utils.isRetryableStatusCode(HttpCodes.TooManyRequests)).toEqual(true)
    expect(utils.isRetryableStatusCode(HttpCodes.OK)).toEqual(false)
    expect(utils.isRetryableStatusCode(HttpCodes.NotFound)).toEqual(false)
    expect(utils.isRetryableStatusCode(HttpCodes.Forbidden)).toEqual(false)
  })

  it('Test Throttled Status Code', () => {
    expect(utils.isThrottledStatusCode(HttpCodes.TooManyRequests)).toEqual(true)
    expect(utils.isThrottledStatusCode(HttpCodes.InternalServerError)).toEqual(
      false
    )
    expect(utils.isThrottledStatusCode(HttpCodes.BadGateway)).toEqual(false)
    expect(utils.isThrottledStatusCode(HttpCodes.ServiceUnavailable)).toEqual(
      false
    )
  })

  it('Test Forbidden Status Code', () => {
    expect(utils.isForbiddenStatusCode(HttpCodes.Forbidden)).toEqual(true)
    expect(utils.isForbiddenStatusCode(HttpCodes.InternalServerError)).toEqual(
      false
    )
    expect(utils.isForbiddenStatusCode(HttpCodes.TooManyRequests)).toEqual(
      false
    )
    expect(utils.isForbiddenStatusCode(HttpCodes.OK)).toEqual(false)
  })

  it('Test Creating Artifact Directories', async () => {
    const root = path.join(__dirname, '_temp', 'artifact-download')
    // remove directory before starting
    await io.rmRF(root)

    const directory1 = path.join(root, 'folder2', 'folder3')
    const directory2 = path.join(directory1, 'folder1')

    // Initially should not exist
    await expect(fs.promises.access(directory1)).rejects.not.toBeUndefined()
    await expect(fs.promises.access(directory2)).rejects.not.toBeUndefined()
    const directoryStructure = [directory1, directory2]
    await utils.createDirectoriesForArtifact(directoryStructure)
    // directories should now be created
    await expect(fs.promises.access(directory1)).resolves.toEqual(undefined)
    await expect(fs.promises.access(directory2)).resolves.toEqual(undefined)
  })

  it('Test Creating Empty Files', async () => {
    const root = path.join(__dirname, '_temp', 'empty-files')
    await io.rmRF(root)

    const emptyFile1 = path.join(root, 'emptyFile1')
    const directoryToCreate = path.join(root, 'folder1')
    const emptyFile2 = path.join(directoryToCreate, 'emptyFile2')

    // empty files should only be created after the directory structure is fully set up
    // ensure the directories are first created by using the createDirectoriesForArtifact method
    const directoryStructure = [root, directoryToCreate]
    await utils.createDirectoriesForArtifact(directoryStructure)
    await expect(fs.promises.access(root)).resolves.toEqual(undefined)
    await expect(fs.promises.access(directoryToCreate)).resolves.toEqual(
      undefined
    )

    await expect(fs.promises.access(emptyFile1)).rejects.not.toBeUndefined()
    await expect(fs.promises.access(emptyFile2)).rejects.not.toBeUndefined()

    const emptyFilesToCreate = [emptyFile1, emptyFile2]
    await utils.createEmptyFilesForArtifact(emptyFilesToCreate)

    await expect(fs.promises.access(emptyFile1)).resolves.toEqual(undefined)
    const size1 = (await fs.promises.stat(emptyFile1)).size
    expect(size1).toEqual(0)
    await expect(fs.promises.access(emptyFile2)).resolves.toEqual(undefined)
    const size2 = (await fs.promises.stat(emptyFile2)).size
    expect(size2).toEqual(0)
  })
})
@ -0,0 +1,53 @@
# Additional Information

Extra information
- [Non-Supported Characters](#Non-Supported-Characters)
- [Permission loss](#Permission-Loss)
- [Considerations](#Considerations)
- [Compression](#Is-my-artifact-compressed)

## Non-Supported Characters

When uploading an artifact, the `name` parameter and the files specified in `files` cannot contain any of the following characters. The server will reject them if they are sent over and the upload will fail. These characters are not allowed due to limitations and restrictions with certain file systems such as NTFS. To maintain platform-agnostic behavior, any character that is unsupported by an individual filesystem/platform is not supported on any filesystem/platform.

- "
- :
- <
- \>
- |
- \*
- ?

In addition to the aforementioned characters, the `name` parameter also cannot include the following (a validation sketch follows the list):
- \
- /
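
The sketch below mirrors these documented restrictions so inputs can be checked before calling `uploadArtifact`. It is illustrative only: the package already performs an equivalent check internally (`checkArtifactName`), and the helper name here is hypothetical.

```js
// Hypothetical helper mirroring the documented character restrictions;
// not the package's actual implementation
const invalidCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?']

function isValidArtifactName(name) {
  // empty names are rejected as well
  if (!name) {
    return false
  }
  return !invalidCharacters.some(character => name.includes(character))
}
```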

## Permission Loss

File permissions are not maintained between uploaded and downloaded artifacts. If file permissions need to be maintained (such as for an executable), consider archiving all of the files using something like `tar` and then uploading the single archive. After downloading the artifact, you can `un-tar` the individual files and the permissions will be preserved.

```js
const artifact = require('@actions/artifact')
const artifactClient = artifact.create()
const artifactName = 'my-artifact'
const files = [
    '/home/user/files/plz-upload/my-archive.tgz'
]
const rootDirectory = '/home/user/files/plz-upload'
const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory)
```
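
On the download side, the permissions come back once the archive is extracted. A minimal sketch, assuming `tar` is available on the runner and using `@actions/exec` from this toolkit (the artifact and archive names match the upload example above):

```js
const artifact = require('@actions/artifact')
const exec = require('@actions/exec')
const artifactClient = artifact.create()

const downloadResponse = await artifactClient.downloadArtifact('my-artifact')
// extract the archive in place; file permissions inside the tgz are preserved
await exec.exec('tar', ['-xzf', 'my-archive.tgz'], {cwd: downloadResponse.downloadPath})
```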

## Considerations

During upload, each file is uploaded concurrently in 4MB chunks using a separate HTTPS connection per file. Chunked uploads are used so that in the event of a failure (which is entirely possible because the internet is not perfect), the upload can be retried. If there is an error, a retry will be attempted after a certain period of time.

Uploading will generally be faster if there are fewer, larger files rather than many smaller ones. Depending on the types and quantities of files being uploaded, it might be beneficial to separately compress and archive everything into a single archive (using something like `tar` or `zip`) before starting an artifact upload to speed things up.

## Is my artifact compressed?

GZip is used internally to compress individual files before starting an upload. Compression helps reduce the total amount of data that must be uploaded and stored while helping to speed up uploads (this performance benefit is especially significant on self-hosted runners). If GZip does not reduce the size of the file that is being uploaded, the original file is uploaded as-is.

Compression using GZip also helps speed up artifact download as part of a workflow. Header information is used to determine if an individual file was uploaded using GZip and, if necessary, decompression is applied.

When downloading an artifact from the GitHub UI (this differs from downloading an artifact during a workflow), a single Zip file is dynamically created that contains all of the files uploaded as part of an artifact. Any files that were uploaded using GZip are decompressed on the server before being added to the Zip file along with the remaining files.
@ -0,0 +1,53 @@

# Implementation Details

Warning: Implementation details may change at any time without notice. This is meant to serve as a reference to help users understand the package.

## Upload/Compression flow



## Retry Logic when downloading an individual file



## Proxy support

This package uses the `@actions/http-client` NPM package internally which supports proxied requests out of the box.

## HttpManager

### `keep-alive` header

When an HTTP call is made to upload or download an individual file, the server will close the HTTP connection after the upload/download is complete and respond with a header indicating `Connection: close`.

[HTTP closed connection header information](https://tools.ietf.org/html/rfc2616#section-14.10)

TCP connections are sometimes not immediately closed by the node client (Windows might hold on to the port for an extra period of time before actually releasing it, for example), and a large number of closed connections can cause port exhaustion before ports get released and become available again.

VMs hosted by GitHub Actions have 1024 available ports, so uploading 1000+ files very quickly can cause port exhaustion if connections get closed immediately. This can start to cause strange undefined behavior and timeouts.

In order for connections not to close immediately, the `keep-alive` header is used to indicate to the server that the connection should stay open. If a `keep-alive` header is used, the connection needs to be disposed of by calling `dispose()` on the `HttpClient`.

[`keep-alive` header information](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive)
[@actions/http-client client disposal](https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292)
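
A minimal sketch of this lifecycle with `@actions/http-client` (the user agent and URL are placeholders): `keepAlive` is opted into through the request options, and the client is explicitly disposed once it is no longer needed.

```js
const {HttpClient} = require('@actions/http-client')

// keepAlive is opt-in; a client created with it must be disposed explicitly
const client = new HttpClient('artifact-example', [], {keepAlive: true})
const response = await client.get('https://www.example.com/')
await response.readBody()
client.dispose()
```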

### Multiple HTTP clients

During an artifact upload or download, files are concurrently uploaded or downloaded using `async/await`. When an error or retry is encountered, the `HttpClient` that made a call is disposed of and a new one is created. If a single `HttpClient` were used for all HTTP calls and it had to be disposed, it could inadvertently affect any other calls that might be happening concurrently.

Any other concurrent uploads or downloads should be left untouched. Because of this, each concurrent upload or download gets its own `HttpClient`. The `http-manager` is used to manage all available clients and each concurrent upload or download maintains a `httpClientIndex` that keeps track of which client should be used (and potentially disposed and recycled if necessary).
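
A rough sketch of the pattern, using only the `HttpManager` calls that appear elsewhere in this package (`getClient` and `disposeAndReplaceClient`); the worker function and URL are placeholders:

```js
const {HttpManager} = require('./http-manager')

const httpManager = new HttpManager(2) // one client per concurrent worker

async function worker(httpClientIndex) {
  const client = httpManager.getClient(httpClientIndex)
  try {
    const response = await client.get('https://www.example.com/')
    await response.readBody()
  } catch (error) {
    // only this slot's client is disposed and recycled; clients belonging
    // to other concurrent workers are left untouched
    httpManager.disposeAndReplaceClient(httpClientIndex)
  }
}
```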

### Potential resource leaks

When an HTTP response is received, it consists of two parts
- `message`
- `body`

The `message` contains information such as the response code and header information, and it is available immediately. The body, however, is not available immediately and can be read by calling `await response.readBody()`.

TCP connections consist of an input and output buffer to manage what is sent and received across a connection. If the body is not read (even if its contents are not needed), the buffers can stay in use even after `dispose()` gets called on the `HttpClient`. The buffers get released automatically after a certain period of time, but in order for them to be explicitly cleared, `readBody()` is always called.
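
In code, the rule is simply to drain the body before disposing, even when the contents are not needed (a sketch; the URL and payload are placeholders):

```js
const {HttpClient} = require('@actions/http-client')

const client = new HttpClient('artifact-example', [], {keepAlive: true})
const response = await client.patch('https://www.example.com/', '{}')
// always read the body, even if it is discarded, so the connection's
// buffers are explicitly cleared before disposal
await response.readBody()
client.dispose()
```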

### Non-concurrent calls

Both `upload-http-client` and `download-http-client` do not instantiate or create any HTTP clients (the `HttpManager` has that responsibility). If an HTTP call has to be made that does not require the `keep-alive` header (such as when calling `listArtifacts` or `patchArtifactSize`), the first `HttpClient` in the `HttpManager` is used. The number of available clients is equal to the upload or download concurrency and there will always be at least one available.
@ -0,0 +1,138 @@
{
  "name": "@actions/artifact",
  "version": "0.3.1",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "@actions/core": {
      "version": "1.2.3",
      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.3.tgz",
      "integrity": "sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w=="
    },
    "@actions/http-client": {
      "version": "1.0.8",
      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz",
      "integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==",
      "requires": {
        "tunnel": "0.0.6"
      }
    },
    "@types/tmp": {
      "version": "0.1.0",
      "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.1.0.tgz",
      "integrity": "sha512-6IwZ9HzWbCq6XoQWhxLpDjuADodH/MKXRUIDFudvgjcVdjFknvmR+DNsoUeer4XPrEnrZs04Jj+kfV9pFsrhmA=="
    },
    "balanced-match": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
      "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
    },
    "brace-expansion": {
      "version": "1.1.11",
      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
      "requires": {
        "balanced-match": "^1.0.0",
        "concat-map": "0.0.1"
      }
    },
    "concat-map": {
      "version": "0.0.1",
      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
      "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
    },
    "fs.realpath": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
      "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
    },
    "glob": {
      "version": "7.1.6",
      "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
      "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
      "requires": {
        "fs.realpath": "^1.0.0",
        "inflight": "^1.0.4",
        "inherits": "2",
        "minimatch": "^3.0.4",
        "once": "^1.3.0",
        "path-is-absolute": "^1.0.0"
      }
    },
    "inflight": {
      "version": "1.0.6",
      "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
      "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
      "requires": {
        "once": "^1.3.0",
        "wrappy": "1"
      }
    },
    "inherits": {
      "version": "2.0.4",
      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
    },
    "minimatch": {
      "version": "3.0.4",
      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
      "requires": {
        "brace-expansion": "^1.1.7"
      }
    },
    "once": {
      "version": "1.4.0",
      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
      "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
      "requires": {
        "wrappy": "1"
      }
    },
    "path-is-absolute": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
      "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
    },
    "rimraf": {
      "version": "2.7.1",
      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
      "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
      "requires": {
        "glob": "^7.1.3"
      }
    },
    "tmp": {
      "version": "0.1.0",
      "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.1.0.tgz",
      "integrity": "sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw==",
      "requires": {
        "rimraf": "^2.6.3"
      }
    },
    "tmp-promise": {
      "version": "2.0.2",
      "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-2.0.2.tgz",
      "integrity": "sha512-zl71nFWjPKW2KXs+73gEk8RmqvtAeXPxhWDkTUoa3MSMkjq3I+9OeknjF178MQoMYsdqL730hfzvNfEkePxq9Q==",
      "requires": {
        "tmp": "0.1.0"
      }
    },
    "tunnel": {
      "version": "0.0.6",
      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
    },
    "typescript": {
      "version": "3.8.3",
      "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.3.tgz",
      "integrity": "sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==",
      "dev": true
    },
    "wrappy": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
      "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
    }
  }
}
@ -0,0 +1,48 @@
{
  "name": "@actions/artifact",
  "version": "0.3.1",
  "preview": true,
  "description": "Actions artifact lib",
  "keywords": [
    "github",
    "actions",
    "artifact"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/master/packages/artifact",
  "license": "MIT",
  "main": "lib/artifact-client.js",
  "types": "lib/artifact-client.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib"
  ],
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/artifact"
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --audit-level=moderate",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.2.1",
    "@actions/http-client": "^1.0.7",
    "@types/tmp": "^0.1.0",
    "tmp": "^0.1.0",
    "tmp-promise": "^2.0.2"
  },
  "devDependencies": {
    "typescript": "^3.8.3"
  }
}
@ -0,0 +1,20 @@
import {UploadOptions} from './internal/upload-options'
import {UploadResponse} from './internal/upload-response'
import {DownloadOptions} from './internal/download-options'
import {DownloadResponse} from './internal/download-response'
import {ArtifactClient, DefaultArtifactClient} from './internal/artifact-client'

export {
  ArtifactClient,
  UploadResponse,
  UploadOptions,
  DownloadResponse,
  DownloadOptions
}

/**
 * Constructs an ArtifactClient
 */
export function create(): ArtifactClient {
  return DefaultArtifactClient.create()
}
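
// A minimal usage sketch (illustrative only; the artifact name and file list
// below are placeholders):
//
//   const client = create()
//   await client.uploadArtifact('my-artifact', ['file1.txt'], '.')
//   const {downloadPath} = await client.downloadArtifact('my-artifact')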
@ -0,0 +1,47 @@
/**
 * Mocks default limits for easier testing
 */
export function getUploadFileConcurrency(): number {
  return 1
}

export function getUploadChunkConcurrency(): number {
  return 1
}

export function getUploadChunkSize(): number {
  return 4 * 1024 * 1024 // 4 MB Chunks
}

export function getRetryLimit(): number {
  return 2
}

export function getRetryMultiplier(): number {
  return 1.5
}

export function getInitialRetryIntervalInMilliseconds(): number {
  return 10
}

export function getDownloadFileConcurrency(): number {
  return 1
}

/**
 * Mocks the 'ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL' and 'GITHUB_RUN_ID' env variables
 * that are only available from a node context on the runner. This allows for tests to run
 * locally without the env variables actually being set
 */
export function getRuntimeToken(): string {
  return 'totally-valid-token'
}

export function getRuntimeUrl(): string {
  return 'https://www.example.com/'
}

export function getWorkFlowRunId(): string {
  return '15'
}
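
// These mocks are swapped in for the real module when a test file calls
// jest.mock('../src/internal/config-variables'), as the tests in __tests__ do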
@ -0,0 +1,253 @@
import * as core from '@actions/core'
import {
  UploadSpecification,
  getUploadSpecification
} from './upload-specification'
import {UploadHttpClient} from './upload-http-client'
import {UploadResponse} from './upload-response'
import {UploadOptions} from './upload-options'
import {DownloadOptions} from './download-options'
import {DownloadResponse} from './download-response'
import {
  checkArtifactName,
  createDirectoriesForArtifact,
  createEmptyFilesForArtifact
} from './utils'
import {DownloadHttpClient} from './download-http-client'
import {getDownloadSpecification} from './download-specification'
import {getWorkSpaceDirectory} from './config-variables'
import {normalize, resolve} from 'path'

export interface ArtifactClient {
  /**
   * Uploads an artifact
   *
   * @param name the name of the artifact, required
   * @param files a list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory an absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options extra options for customizing the upload behavior
   * @returns a single UploadResponse object
   */
  uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadOptions
  ): Promise<UploadResponse>

  /**
   * Downloads a single artifact associated with a run
   *
   * @param name the name of the artifact being downloaded
   * @param path optional path that denotes where the artifact will be downloaded to
   * @param options extra options that allow for the customization of the download behavior
   */
  downloadArtifact(
    name: string,
    path?: string,
    options?: DownloadOptions
  ): Promise<DownloadResponse>

  /**
   * Downloads all artifacts associated with a run. Because there are multiple artifacts being downloaded, a folder will be created for each one in the specified or default directory
   * @param path optional path that denotes where the artifacts will be downloaded to
   */
  downloadAllArtifacts(path?: string): Promise<DownloadResponse[]>
}

export class DefaultArtifactClient implements ArtifactClient {
  /**
   * Constructs a DefaultArtifactClient
   */
  static create(): DefaultArtifactClient {
    return new DefaultArtifactClient()
  }

  /**
   * Uploads an artifact
   */
  async uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadOptions | undefined
  ): Promise<UploadResponse> {
    checkArtifactName(name)

    // Get specification for the files being uploaded
    const uploadSpecification: UploadSpecification[] = getUploadSpecification(
      name,
      rootDirectory,
      files
    )
    const uploadResponse: UploadResponse = {
      artifactName: name,
      artifactItems: [],
      size: 0,
      failedItems: []
    }

    const uploadHttpClient = new UploadHttpClient()

    if (uploadSpecification.length === 0) {
      core.warning(`No files found that can be uploaded`)
    } else {
      // Create an entry for the artifact in the file container
      const response = await uploadHttpClient.createArtifactInFileContainer(
        name
      )
      if (!response.fileContainerResourceUrl) {
        core.debug(response.toString())
        throw new Error(
          'No URL provided by the Artifact Service to upload an artifact to'
        )
      }
      core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)

      // Concurrently upload each of the files that were found
      const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
        response.fileContainerResourceUrl,
        uploadSpecification,
        options
      )

      // Update the size of the artifact to indicate we are done uploading
      // The uncompressed size is used for display when downloading a zip of the artifact from the UI
      await uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name)

      core.info(
        `Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
      )

      uploadResponse.artifactItems = uploadSpecification.map(
        item => item.absoluteFilePath
      )
      uploadResponse.size = uploadResult.uploadSize
      uploadResponse.failedItems = uploadResult.failedItems
    }
    return uploadResponse
  }

  async downloadArtifact(
    name: string,
    path?: string | undefined,
    options?: DownloadOptions | undefined
  ): Promise<DownloadResponse> {
    const downloadHttpClient = new DownloadHttpClient()

    const artifacts = await downloadHttpClient.listArtifacts()
    if (artifacts.count === 0) {
      throw new Error(
        `Unable to find any artifacts for the associated workflow`
      )
    }

    const artifactToDownload = artifacts.value.find(artifact => {
      return artifact.name === name
    })
    if (!artifactToDownload) {
      throw new Error(`Unable to find an artifact with the name: ${name}`)
    }

    const items = await downloadHttpClient.getContainerItems(
      artifactToDownload.name,
      artifactToDownload.fileContainerResourceUrl
    )

    if (!path) {
      path = getWorkSpaceDirectory()
    }
    path = normalize(path)
    path = resolve(path)

    // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
    const downloadSpecification = getDownloadSpecification(
      name,
      items.value,
      path,
      options?.createArtifactFolder || false
    )

    if (downloadSpecification.filesToDownload.length === 0) {
      core.info(
        `No downloadable files were found for the artifact: ${artifactToDownload.name}`
      )
    } else {
      // Create all necessary directories recursively before starting any download
      await createDirectoriesForArtifact(
        downloadSpecification.directoryStructure
      )
      core.info('Directory structure has been set up for the artifact')
      await createEmptyFilesForArtifact(
        downloadSpecification.emptyFilesToCreate
      )
      await downloadHttpClient.downloadSingleArtifact(
        downloadSpecification.filesToDownload
      )
    }

    return {
      artifactName: name,
      downloadPath: downloadSpecification.rootDownloadLocation
    }
  }

  async downloadAllArtifacts(
    path?: string | undefined
  ): Promise<DownloadResponse[]> {
    const downloadHttpClient = new DownloadHttpClient()

    const response: DownloadResponse[] = []
    const artifacts = await downloadHttpClient.listArtifacts()
    if (artifacts.count === 0) {
      core.info('Unable to find any artifacts for the associated workflow')
      return response
    }

    if (!path) {
      path = getWorkSpaceDirectory()
    }
    path = normalize(path)
    path = resolve(path)

    let downloadedArtifacts = 0
    while (downloadedArtifacts < artifacts.count) {
      const currentArtifactToDownload = artifacts.value[downloadedArtifacts]
      downloadedArtifacts += 1

      // Get container entries for the specific artifact
      const items = await downloadHttpClient.getContainerItems(
        currentArtifactToDownload.name,
        currentArtifactToDownload.fileContainerResourceUrl
      )

      const downloadSpecification = getDownloadSpecification(
        currentArtifactToDownload.name,
        items.value,
        path,
        true
      )
      if (downloadSpecification.filesToDownload.length === 0) {
        core.info(
          `No downloadable files were found for the artifact: ${currentArtifactToDownload.name}`
        )
      } else {
        await createDirectoriesForArtifact(
          downloadSpecification.directoryStructure
        )
        await createEmptyFilesForArtifact(
          downloadSpecification.emptyFilesToCreate
        )
        await downloadHttpClient.downloadSingleArtifact(
          downloadSpecification.filesToDownload
        )
      }

      response.push({
        artifactName: currentArtifactToDownload.name,
        downloadPath: downloadSpecification.rootDownloadLocation
      })
    }
    return response
  }
}
@ -0,0 +1,63 @@
// The number of concurrent uploads that happen at the same time
export function getUploadFileConcurrency(): number {
  return 2
}

// When uploading large files that can't be uploaded with a single http call, this controls
// the chunk size that is used during upload
export function getUploadChunkSize(): number {
  return 4 * 1024 * 1024 // 4 MB Chunks
}

// The maximum number of retries that can be attempted before an upload or download fails
export function getRetryLimit(): number {
  return 5
}

// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
// The retry multiplier controls by how much the backoff time increases depending on the number of retries
export function getRetryMultiplier(): number {
  return 1.5
}
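
// Example of the resulting range (as asserted in the utils tests): with the
// 3000 ms initial interval below and the 1.5 multiplier above, retry #3 waits
// somewhere in [3000 * 1.5 * 3, 3000 * 1.5 * 1.5 * 3) = [13500, 20250) ms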

// The initial wait time if an upload or download fails and a retry is being attempted for the first time
export function getInitialRetryIntervalInMilliseconds(): number {
  return 3000
}

// The number of concurrent downloads that happen at the same time
export function getDownloadFileConcurrency(): number {
  return 2
}

export function getRuntimeToken(): string {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable')
  }
  return token
}

export function getRuntimeUrl(): string {
  const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']
  if (!runtimeUrl) {
    throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable')
  }
  return runtimeUrl
}

export function getWorkFlowRunId(): string {
  const workFlowRunId = process.env['GITHUB_RUN_ID']
  if (!workFlowRunId) {
    throw new Error('Unable to get GITHUB_RUN_ID env variable')
  }
  return workFlowRunId
}

export function getWorkSpaceDirectory(): string {
  const workspaceDirectory = process.env['GITHUB_WORKSPACE']
  if (!workspaceDirectory) {
    throw new Error('Unable to get GITHUB_WORKSPACE env variable')
  }
  return workspaceDirectory
}
@ -0,0 +1,63 @@
export interface ArtifactResponse {
  containerId: string
  size: number
  signedContent: string
  fileContainerResourceUrl: string
  type: string
  name: string
  url: string
}

export interface CreateArtifactParameters {
  Type: string
  Name: string
}

export interface PatchArtifactSize {
  Size: number
}

export interface PatchArtifactSizeSuccessResponse {
  containerId: number
  size: number
  signedContent: string
  type: string
  name: string
  url: string
  uploadUrl: string
}

export interface UploadResults {
  uploadSize: number
  totalSize: number
  failedItems: string[]
}

export interface ListArtifactsResponse {
  count: number
  value: ArtifactResponse[]
}

export interface QueryArtifactResponse {
  count: number
  value: ContainerEntry[]
}

export interface ContainerEntry {
  containerId: number
  scopeIdentifier: string
  path: string
  itemType: string
  status: string
  fileLength?: number
  fileEncoding?: number
  fileType?: number
  dateCreated: string
  dateLastModified: string
  createdBy: string
  lastModifiedBy: string
  itemLocation: string
  contentLocation: string
  fileId?: number
  contentId: string
}
@ -0,0 +1,293 @@
|
|||
import * as fs from 'fs'
|
||||
import * as core from '@actions/core'
|
||||
import * as zlib from 'zlib'
|
||||
import {
|
||||
getArtifactUrl,
|
||||
getDownloadRequestOptions,
|
||||
isSuccessStatusCode,
|
||||
isRetryableStatusCode,
|
||||
isThrottledStatusCode,
|
||||
getExponentialRetryTimeInMilliseconds,
|
||||
tryGetRetryAfterValueTimeInMilliseconds,
|
||||
displayHttpDiagnostics
|
||||
} from './utils'
|
||||
import {URL} from 'url'
|
||||
import {StatusReporter} from './status-reporter'
|
||||
import {performance} from 'perf_hooks'
|
||||
import {ListArtifactsResponse, QueryArtifactResponse} from './contracts'
|
||||
import {IHttpClientResponse} from '@actions/http-client/interfaces'
|
||||
import {HttpManager} from './http-manager'
|
||||
import {DownloadItem} from './download-specification'
|
||||
import {getDownloadFileConcurrency, getRetryLimit} from './config-variables'
|
||||
import {IncomingHttpHeaders} from 'http'
|
||||
|
||||
export class DownloadHttpClient {
|
||||
// http manager is used for concurrent connections when downloading multiple files at once
|
||||
private downloadHttpManager: HttpManager
|
||||
private statusReporter: StatusReporter
|
||||
|
||||
constructor() {
|
||||
this.downloadHttpManager = new HttpManager(getDownloadFileConcurrency())
|
||||
// downloads are usually significantly faster than uploads so display status information every second
|
||||
this.statusReporter = new StatusReporter(1000)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a list of all artifacts that are in a specific container
|
||||
*/
|
||||
async listArtifacts(): Promise<ListArtifactsResponse> {
|
||||
const artifactUrl = getArtifactUrl()
|
||||
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.downloadHttpManager.getClient(0)
|
||||
const requestOptions = getDownloadRequestOptions('application/json')
|
||||
const response = await client.get(artifactUrl, requestOptions)
|
||||
const body: string = await response.readBody()
|
||||
|
||||
if (isSuccessStatusCode(response.message.statusCode) && body) {
|
||||
return JSON.parse(body)
|
||||
}
|
||||
displayHttpDiagnostics(response)
|
||||
throw new Error(
|
||||
`Unable to list artifacts for the run. Resource Url ${artifactUrl}`
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a set of container items that describe the contents of an artifact
|
||||
* @param artifactName the name of the artifact
|
||||
* @param containerUrl the artifact container URL for the run
|
||||
*/
|
||||
async getContainerItems(
|
||||
artifactName: string,
|
||||
containerUrl: string
|
||||
): Promise<QueryArtifactResponse> {
|
||||
// the itemPath search parameter controls which containers will be returned
|
||||
const resourceUrl = new URL(containerUrl)
|
||||
resourceUrl.searchParams.append('itemPath', artifactName)
|
||||
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.downloadHttpManager.getClient(0)
|
||||
const requestOptions = getDownloadRequestOptions('application/json')
|
||||
const response = await client.get(resourceUrl.toString(), requestOptions)
|
||||
const body: string = await response.readBody()
|
||||
|
||||
if (isSuccessStatusCode(response.message.statusCode) && body) {
|
||||
return JSON.parse(body)
|
||||
}
|
||||
displayHttpDiagnostics(response)
|
||||
throw new Error(`Unable to get ContainersItems from ${resourceUrl}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Concurrently downloads all the files that are part of an artifact
|
||||
* @param downloadItems information about what items to download and where to save them
|
||||
*/
|
||||
async downloadSingleArtifact(downloadItems: DownloadItem[]): Promise<void> {
|
||||
const DOWNLOAD_CONCURRENCY = getDownloadFileConcurrency()
|
||||
// limit the number of files downloaded at a single time
|
||||
core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`)
|
||||
const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]
|
||||
let currentFile = 0
|
||||
let downloadedFiles = 0
|
||||
|
||||
core.info(
|
||||
`Total number of files that will be downloaded: ${downloadItems.length}`
|
||||
)
|
||||
|
||||
this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length)
|
||||
this.statusReporter.start()
|
||||
|
||||
await Promise.all(
|
||||
parallelDownloads.map(async index => {
|
||||
while (currentFile < downloadItems.length) {
|
||||
const currentFileToDownload = downloadItems[currentFile]
|
||||
currentFile += 1
|
||||
|
||||
const startTime = performance.now()
|
||||
await this.downloadIndividualFile(
|
||||
index,
|
||||
currentFileToDownload.sourceLocation,
|
||||
currentFileToDownload.targetPath
|
||||
)
|
||||
|
||||
if (core.isDebug()) {
|
||||
core.debug(
|
||||
`File: ${++downloadedFiles}/${downloadItems.length}. ${
|
||||
currentFileToDownload.targetPath
|
||||
} took ${(performance.now() - startTime).toFixed(
|
||||
3
|
||||
)} milliseconds to finish downloading`
|
||||
)
|
||||
}
|
||||
|
||||
this.statusReporter.incrementProcessedCount()
|
||||
}
|
||||
})
|
||||
)
|
||||
.catch(error => {
|
||||
throw new Error(`Unable to download the artifact: ${error}`)
|
||||
})
|
||||
.finally(() => {
|
||||
this.statusReporter.stop()
|
||||
// safety dispose all connections
|
||||
this.downloadHttpManager.disposeAndReplaceAllClients()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads an individual file
|
||||
* @param httpClientIndex the index of the http client that is used to make all of the calls
|
||||
* @param artifactLocation origin location where a file will be downloaded from
|
||||
* @param downloadPath destination location for the file being downloaded
|
||||
*/
|
||||
private async downloadIndividualFile(
|
||||
httpClientIndex: number,
|
||||
artifactLocation: string,
|
||||
downloadPath: string
|
||||
): Promise<void> {
|
||||
let retryCount = 0
|
||||
const retryLimit = getRetryLimit()
|
||||
const destinationStream = fs.createWriteStream(downloadPath)
|
||||
const requestOptions = getDownloadRequestOptions(
|
||||
'application/json',
|
||||
true,
|
||||
true
|
||||
)
|
||||
|
||||
// a single GET request is used to download a file
|
||||
const makeDownloadRequest = async (): Promise<IHttpClientResponse> => {
|
||||
const client = this.downloadHttpManager.getClient(httpClientIndex)
|
||||
return await client.get(artifactLocation, requestOptions)
|
||||
}
|
||||
|
||||
// check the response headers to determine if the file was compressed using gzip
|
||||
const isGzip = (headers: IncomingHttpHeaders): boolean => {
|
||||
return (
|
||||
'content-encoding' in headers && headers['content-encoding'] === 'gzip'
|
||||
)
|
||||
}
|
||||
|
||||
// Increments the current retry count and then checks if the retry limit has been reached
|
||||
// If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided,
|
||||
// it will be used
|
||||
const backOff = async (retryAfterValue?: number): Promise<void> => {
|
||||
retryCount++
|
||||
if (retryCount > retryLimit) {
|
||||
return Promise.reject(
|
||||
new Error(
|
||||
`Retry limit has been reached. Unable to download ${artifactLocation}`
|
||||
)
|
||||
)
|
||||
} else {
|
||||
this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex)
|
||||
if (retryAfterValue) {
|
||||
// Back off by waiting the specified time denoted by the retry-after header
|
||||
core.info(
|
||||
`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`
|
||||
)
|
||||
await new Promise(resolve => setTimeout(resolve, retryAfterValue))
|
||||
} else {
|
||||
// Back off using an exponential value that depends on the retry count
          const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount)
          core.info(
            `Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`
          )
          await new Promise(resolve => setTimeout(resolve, backoffTime))
        }
        core.info(
          `Finished backoff for retry #${retryCount}, continuing with download`
        )
      }
    }

    // keep trying to download a file until a retry limit has been reached
    while (retryCount <= retryLimit) {
      let response: IHttpClientResponse
      try {
        response = await makeDownloadRequest()
      } catch (error) {
        // if an error is caught, it is usually indicative of a timeout so retry the download
        core.info('An error occurred while attempting to download a file')
        // eslint-disable-next-line no-console
        console.log(error)

        // increment the retryCount and use exponential backoff to wait before making the next request
        await backOff()
        continue
      }

      if (isSuccessStatusCode(response.message.statusCode)) {
        // The body contains the contents of the file, however calling response.readBody() converts all of the content to a string,
        // which can cause some gzip encoded data to be lost.
        // Instead of using response.readBody(), response.message is a readable stream that can be used directly to get the raw body contents
        return this.pipeResponseToFile(
          response,
          destinationStream,
          isGzip(response.message.headers)
        )
      } else if (isRetryableStatusCode(response.message.statusCode)) {
        core.info(
          `A ${response.message.statusCode} response code has been received while attempting to download an artifact`
        )
        // if a throttled status code is received, try to get the retryAfter header value, else defer to standard exponential backoff
        isThrottledStatusCode(response.message.statusCode)
          ? await backOff(
              tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)
            )
          : await backOff()
      } else {
        // Some unexpected response code, fail immediately and stop the download
        displayHttpDiagnostics(response)
        return Promise.reject(
          new Error(
            `Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`
          )
        )
      }
    }
  }

  /**
   * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
   * @param response the http response received when downloading a file
   * @param destinationStream the stream where the file should be written to
   * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
   */
  async pipeResponseToFile(
    response: IHttpClientResponse,
    destinationStream: fs.WriteStream,
    isGzip: boolean
  ): Promise<void> {
    await new Promise((resolve, reject) => {
      if (isGzip) {
        const gunzip = zlib.createGunzip()
        response.message
          .pipe(gunzip)
          .pipe(destinationStream)
          .on('close', () => {
            resolve()
          })
          .on('error', error => {
            core.error(
              `An error has been encountered while decompressing and writing a downloaded file to ${destinationStream.path}`
            )
            reject(error)
          })
      } else {
        response.message
          .pipe(destinationStream)
          .on('close', () => {
            resolve()
          })
          .on('error', error => {
            core.error(
              `An error has been encountered while writing a downloaded file to ${destinationStream.path}`
            )
            reject(error)
          })
      }
    })
    return
  }
}
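The retry loop above couples three concerns: making the request, classifying the failure, and backing off before the next attempt. As a minimal standalone sketch of the same pattern (the names here are illustrative, not part of the package's API):

// A generic retry wrapper under the same assumptions as the loop above:
// any thrown error is treated as retryable until the limit is exhausted.
async function withRetries<T>(
  makeRequest: () => Promise<T>,
  retryLimit: number,
  getBackoffMs: (retryCount: number) => number
): Promise<T> {
  let retryCount = 0
  while (true) {
    try {
      return await makeRequest()
    } catch (error) {
      retryCount++
      if (retryCount > retryLimit) {
        throw error
      }
      // wait before the next attempt; the delay grows with each retry
      await new Promise(resolve =>
        setTimeout(resolve, getBackoffMs(retryCount))
      )
    }
  }
}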
@ -0,0 +1,7 @@
export interface DownloadOptions {
  /**
   * Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder),
   * defaults to false if not specified
   */
  createArtifactFolder?: boolean
}
@ -0,0 +1,11 @@
export interface DownloadResponse {
  /**
   * The name of the artifact that was downloaded
   */
  artifactName: string

  /**
   * The full path to where the artifact was downloaded
   */
  downloadPath: string
}
@ -0,0 +1,87 @@
import * as path from 'path'
import {ContainerEntry} from './contracts'

export interface DownloadSpecification {
  // root download location for the artifact
  rootDownloadLocation: string

  // directories that need to be created for all the items in the artifact
  directoryStructure: string[]

  // empty files that are part of the artifact and don't require any downloading
  emptyFilesToCreate: string[]

  // individual files that need to be downloaded as part of the artifact
  filesToDownload: DownloadItem[]
}

export interface DownloadItem {
  // Url that denotes where to download the item from
  sourceLocation: string

  // Information about where the file should be downloaded to
  targetPath: string
}

/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe the files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
export function getDownloadSpecification(
  artifactName: string,
  artifactEntries: ContainerEntry[],
  downloadPath: string,
  includeRootDirectory: boolean
): DownloadSpecification {
  // use a set for the directory paths so that there are no duplicates
  const directories = new Set<string>()

  const specifications: DownloadSpecification = {
    rootDownloadLocation: includeRootDirectory
      ? path.join(downloadPath, artifactName)
      : downloadPath,
    directoryStructure: [],
    emptyFilesToCreate: [],
    filesToDownload: []
  }

  for (const entry of artifactEntries) {
    // Ignore entries in the container that don't begin with the artifact name
    if (
      entry.path.startsWith(`${artifactName}/`) ||
      entry.path.startsWith(`${artifactName}\\`)
    ) {
      // normalize all separators to the local OS
      const normalizedPathEntry = path.normalize(entry.path)
      // entry.path always starts with the artifact name; if includeRootDirectory is false, remove the name from the beginning of the path
      const filePath = path.join(
        downloadPath,
        includeRootDirectory
          ? normalizedPathEntry
          : normalizedPathEntry.replace(artifactName, '')
      )

      // A case-insensitive folder structure is maintained in the backend and not every folder is created, so the 'folder'
      // itemType cannot be relied upon. The files must be used to determine the directory structure
      if (entry.itemType === 'file') {
        // Get the directories that we need to create from the filePath for each individual file
        directories.add(path.dirname(filePath))
        if (entry.fileLength === 0) {
          // An empty file was uploaded, create the empty files locally so that no extra http calls are made
          specifications.emptyFilesToCreate.push(filePath)
        } else {
          specifications.filesToDownload.push({
            sourceLocation: entry.contentLocation,
            targetPath: filePath
          })
        }
      }
    }
  }

  specifications.directoryStructure = Array.from(directories)
  return specifications
}
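A hedged usage sketch of getDownloadSpecification follows. The ContainerEntry below is hypothetical (the cast papers over any additional fields the real contract may define), and the expected values follow directly from the logic above on a POSIX filesystem:

// Illustrative only; the contentLocation URL is made up.
const spec = getDownloadSpecification(
  'my-artifact',
  [
    {
      path: 'my-artifact/file1.txt',
      itemType: 'file',
      fileLength: 42,
      contentLocation: 'https://example.com/file1'
    } as ContainerEntry
  ],
  '/home/user/downloads',
  true // includeRootDirectory
)
// spec.rootDownloadLocation === '/home/user/downloads/my-artifact'
// spec.filesToDownload[0].targetPath === '/home/user/downloads/my-artifact/file1.txt'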
@ -0,0 +1,33 @@
import {HttpClient} from '@actions/http-client/index'
import {createHttpClient} from './utils'

/**
 * Used for managing http clients during either upload or download
 */
export class HttpManager {
  private clients: HttpClient[]

  constructor(clientCount: number) {
    if (clientCount < 1) {
      throw new Error('There must be at least one client')
    }
    // create a distinct client per slot; `new Array(n).fill(createHttpClient())`
    // would share a single client instance across every index, defeating
    // per-index disposal
    this.clients = Array.from({length: clientCount}, () => createHttpClient())
  }

  getClient(index: number): HttpClient {
    return this.clients[index]
  }

  // client disposal is necessary if a keep-alive connection is used to properly close the connection
  // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
  disposeAndReplaceClient(index: number): void {
    this.clients[index].dispose()
    this.clients[index] = createHttpClient()
  }

  disposeAndReplaceAllClients(): void {
    for (const [index] of this.clients.entries()) {
      this.disposeAndReplaceClient(index)
    }
  }
}
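A short usage sketch of HttpManager, assuming one client per concurrent "lane" as in the upload and download clients elsewhere in this package:

// Two lanes of concurrent work, each with its own keep-alive client.
const manager = new HttpManager(2)
const client = manager.getClient(0)
// ...use `client` for requests on lane 0; after a failed call, dispose it so a
// stale keep-alive connection is not reused:
manager.disposeAndReplaceClient(0)
// once all work is finished, close every remaining connection:
manager.disposeAndReplaceAllClients()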
@ -0,0 +1,92 @@
import {info} from '@actions/core'

/**
 * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded
 *
 * The display frequency can be adjusted using the displayFrequencyInMilliseconds variable;
 * the total status of the upload/download gets displayed according to this value.
 * If there is a large file being uploaded, extra information about its individual status can also be displayed using the updateLargeFileStatus function
 */

export class StatusReporter {
  private totalNumberOfFilesToProcess = 0
  private processedCount = 0
  private displayFrequencyInMilliseconds: number
  private largeFiles = new Map<string, string>()
  private totalFileStatus: NodeJS.Timeout | undefined
  private largeFileStatus: NodeJS.Timeout | undefined

  constructor(displayFrequencyInMilliseconds: number) {
    this.totalFileStatus = undefined
    this.largeFileStatus = undefined
    this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds
  }

  setTotalNumberOfFilesToProcess(fileTotal: number): void {
    this.totalNumberOfFilesToProcess = fileTotal
  }

  start(): void {
    // displays information about the total upload/download status
    this.totalFileStatus = setInterval(() => {
      // display 1 decimal place without any rounding
      const percentage = this.formatPercentage(
        this.processedCount,
        this.totalNumberOfFilesToProcess
      )
      info(
        `Total file count: ${
          this.totalNumberOfFilesToProcess
        } ---- Processed file #${this.processedCount} (${percentage.slice(
          0,
          percentage.indexOf('.') + 2
        )}%)`
      )
    }, this.displayFrequencyInMilliseconds)

    // displays extra information every 1 second about any large files that take a significant amount of time to upload or download
    this.largeFileStatus = setInterval(() => {
      for (const value of Array.from(this.largeFiles.values())) {
        info(value)
      }
      // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added
      this.largeFiles.clear()
    }, 1000)
  }

  // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload
  updateLargeFileStatus(
    fileName: string,
    numerator: number,
    denominator: number
  ): void {
    // display 1 decimal place without any rounding
    const percentage = this.formatPercentage(numerator, denominator)
    const displayInformation = `Uploading ${fileName} (${percentage.slice(
      0,
      percentage.indexOf('.') + 2
    )}%)`

    // any previously added display information should be overwritten for the specific large file because a map is being used
    this.largeFiles.set(fileName, displayInformation)
  }

  stop(): void {
    if (this.totalFileStatus) {
      clearInterval(this.totalFileStatus)
    }

    if (this.largeFileStatus) {
      clearInterval(this.largeFileStatus)
    }
  }

  incrementProcessedCount(): void {
    this.processedCount++
  }

  private formatPercentage(numerator: number, denominator: number): string {
    // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
    return ((numerator / denominator) * 100).toFixed(4).toString()
  }
}
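An illustrative lifecycle for StatusReporter (the numbers below are arbitrary):

const reporter = new StatusReporter(10000) // print overall progress every 10s
reporter.setTotalNumberOfFilesToProcess(3)
reporter.start()
// after each completed file:
reporter.incrementProcessedCount()
// for a file large enough to warrant per-file reporting, mid-transfer:
reporter.updateLargeFileStatus(
  'big-file.bin',
  50 * 1024 * 1024,
  200 * 1024 * 1024
) // shows "Uploading big-file.bin (25.0%)" on the next 1s tick
// once everything is done, stop both timers:
reporter.stop()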
@ -0,0 +1,53 @@
import * as fs from 'fs'
import * as zlib from 'zlib'
import {promisify} from 'util'
const stat = promisify(fs.stat)

/**
 * Creates a Gzip compressed file of an original file at the provided temporary filepath location
 * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
 * @param {string} tempFilePath the location of where the Gzip file will be created
 * @returns the size of gzip file that gets created
 */
export async function createGZipFileOnDisk(
  originalFilePath: string,
  tempFilePath: string
): Promise<number> {
  return new Promise((resolve, reject) => {
    const inputStream = fs.createReadStream(originalFilePath)
    const gzip = zlib.createGzip()
    const outputStream = fs.createWriteStream(tempFilePath)
    inputStream.pipe(gzip).pipe(outputStream)
    outputStream.on('finish', async () => {
      // wait for the stream to finish before calculating the size, which is needed as part of the Content-Length header when starting an upload
      const size = (await stat(tempFilePath)).size
      resolve(size)
    })
    outputStream.on('error', error => {
      // eslint-disable-next-line no-console
      console.log(error)
      // reject must actually be called; a bare `reject` reference would silently do nothing
      reject(error)
    })
  })
}

/**
 * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
 * @param originalFilePath the path to the original file that is being GZipped
 * @returns a buffer with the GZip file
 */
export async function createGZipFileInBuffer(
  originalFilePath: string
): Promise<Buffer> {
  return new Promise(async resolve => {
    const inputStream = fs.createReadStream(originalFilePath)
    const gzip = zlib.createGzip()
    inputStream.pipe(gzip)
    // read the stream into a buffer, using experimental async iterators; see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043
    const chunks: Buffer[] = []
    for await (const chunk of gzip) {
      chunks.push(chunk)
    }
    resolve(Buffer.concat(chunks))
  })
}
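A sketch of how these two helpers might be combined, mirroring the size check made in upload-http-client.ts: compress small files in memory, large files via a temporary file on disk, and fall back to the original file whenever gzip does not actually shrink it. `gzipForUpload` and `tmpPath` are illustrative names, not part of this module.

async function gzipForUpload(file: string, tmpPath: string): Promise<void> {
  const originalSize = (await stat(file)).size
  if (originalSize < 65536) {
    const buffer = await createGZipFileInBuffer(file)
    if (buffer.byteLength < originalSize) {
      // upload `buffer` as a gzip-encoded body
    }
  } else {
    const gzipSize = await createGZipFileOnDisk(file, tmpPath)
    if (gzipSize < originalSize) {
      // upload from tmpPath with Content-Encoding: gzip
    }
  }
  // otherwise upload the original file uncompressed
}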
@ -0,0 +1,510 @@
import * as fs from 'fs'
import * as core from '@actions/core'
import * as tmp from 'tmp-promise'
import * as stream from 'stream'
import {
  ArtifactResponse,
  CreateArtifactParameters,
  PatchArtifactSize,
  UploadResults
} from './contracts'
import {
  getArtifactUrl,
  getContentRange,
  getUploadRequestOptions,
  isRetryableStatusCode,
  isSuccessStatusCode,
  isThrottledStatusCode,
  isForbiddenStatusCode,
  displayHttpDiagnostics,
  getExponentialRetryTimeInMilliseconds,
  tryGetRetryAfterValueTimeInMilliseconds
} from './utils'
import {
  getUploadChunkSize,
  getUploadFileConcurrency,
  getRetryLimit
} from './config-variables'
import {promisify} from 'util'
import {URL} from 'url'
import {performance} from 'perf_hooks'
import {StatusReporter} from './status-reporter'
import {HttpClientResponse} from '@actions/http-client/index'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {HttpManager} from './http-manager'
import {UploadSpecification} from './upload-specification'
import {UploadOptions} from './upload-options'
import {createGZipFileOnDisk, createGZipFileInBuffer} from './upload-gzip'
const stat = promisify(fs.stat)

export class UploadHttpClient {
  private uploadHttpManager: HttpManager
  private statusReporter: StatusReporter

  constructor() {
    this.uploadHttpManager = new HttpManager(getUploadFileConcurrency())
    this.statusReporter = new StatusReporter(10000)
  }

  /**
   * Creates a file container for the new artifact in the remote blob storage/file service
   * @param {string} artifactName Name of the artifact being created
   * @returns The response from the Artifact Service if the file container was successfully created
   */
  async createArtifactInFileContainer(
    artifactName: string
  ): Promise<ArtifactResponse> {
    const parameters: CreateArtifactParameters = {
      Type: 'actions_storage',
      Name: artifactName
    }
    const data: string = JSON.stringify(parameters, null, 2)
    const artifactUrl = getArtifactUrl()

    // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
    const client = this.uploadHttpManager.getClient(0)
    const requestOptions = getUploadRequestOptions('application/json', false)
    const rawResponse = await client.post(artifactUrl, data, requestOptions)
    const body: string = await rawResponse.readBody()

    if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
      return JSON.parse(body)
    } else if (isForbiddenStatusCode(rawResponse.message.statusCode)) {
      // if a 403 is returned when trying to create a file container, the customer has exceeded
      // their storage quota so no new artifact containers can be created
      throw new Error(
        `Artifact storage quota has been hit. Unable to upload any new artifacts`
      )
    } else {
      displayHttpDiagnostics(rawResponse)
      throw new Error(
        `Unable to create a container for the artifact ${artifactName} at ${artifactUrl}`
      )
    }
  }

  /**
   * Concurrently upload all of the files in chunks
   * @param {string} uploadUrl Base Url for the artifact that was created
   * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
   * @returns The size of all the files uploaded in bytes
   */
  async uploadArtifactToFileContainer(
    uploadUrl: string,
    filesToUpload: UploadSpecification[],
    options?: UploadOptions
  ): Promise<UploadResults> {
    const FILE_CONCURRENCY = getUploadFileConcurrency()
    const MAX_CHUNK_SIZE = getUploadChunkSize()
    core.debug(
      `File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`
    )

    const parameters: UploadFileParameters[] = []
    // by default, file uploads will continue if there is an error unless specified differently in the options
    let continueOnError = true
    if (options) {
      if (options.continueOnError === false) {
        continueOnError = false
      }
    }

    // prepare the necessary parameters to upload all the files
    for (const file of filesToUpload) {
      const resourceUrl = new URL(uploadUrl)
      resourceUrl.searchParams.append('itemPath', file.uploadFilePath)
      parameters.push({
        file: file.absoluteFilePath,
        resourceUrl: resourceUrl.toString(),
        maxChunkSize: MAX_CHUNK_SIZE,
        continueOnError
      })
    }

    const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]
    const failedItemsToReport: string[] = []
    let currentFile = 0
    let completedFiles = 0
    let uploadFileSize = 0
    let totalFileSize = 0
    let abortPendingFileUploads = false

    this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length)
    this.statusReporter.start()

    // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
    await Promise.all(
      parallelUploads.map(async index => {
        while (currentFile < filesToUpload.length) {
          const currentFileParameters = parameters[currentFile]
          currentFile += 1
          if (abortPendingFileUploads) {
            failedItemsToReport.push(currentFileParameters.file)
            continue
          }

          const startTime = performance.now()
          const uploadFileResult = await this.uploadFileAsync(
            index,
            currentFileParameters
          )

          if (core.isDebug()) {
            core.debug(
              `File: ${++completedFiles}/${filesToUpload.length}. ${
                currentFileParameters.file
              } took ${(performance.now() - startTime).toFixed(
                3
              )} milliseconds to finish upload`
            )
          }

          uploadFileSize += uploadFileResult.successfulUploadSize
          totalFileSize += uploadFileResult.totalSize
          if (uploadFileResult.isSuccess === false) {
            failedItemsToReport.push(currentFileParameters.file)
            if (!continueOnError) {
              // fail fast
              core.error(`aborting artifact upload`)
              abortPendingFileUploads = true
            }
          }
          this.statusReporter.incrementProcessedCount()
        }
      })
    )

    this.statusReporter.stop()
    // done uploading, safety dispose all connections
    this.uploadHttpManager.disposeAndReplaceAllClients()

    core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`)
    return {
      uploadSize: uploadFileSize,
      totalSize: totalFileSize,
      failedItems: failedItemsToReport
    }
  }

  /**
   * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
   * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
   * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
   * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
   * @returns The size of the file that was uploaded in bytes along with any failed uploads
   */
  private async uploadFileAsync(
    httpClientIndex: number,
    parameters: UploadFileParameters
  ): Promise<UploadFileResult> {
    const totalFileSize: number = (await stat(parameters.file)).size
    let offset = 0
    let isUploadSuccessful = true
    let failedChunkSizes = 0
    let uploadFileSize = 0
    let isGzip = true

    // the file that is being uploaded is less than 64k in size; to increase throughput and to minimize disk I/O
    // for creating a new GZip file, an in-memory buffer is used for compression
    if (totalFileSize < 65536) {
      const buffer = await createGZipFileInBuffer(parameters.file)
      let uploadStream: NodeJS.ReadableStream

      if (totalFileSize < buffer.byteLength) {
        // compression did not help with reducing the size, use a readable stream from the original file for upload
        uploadStream = fs.createReadStream(parameters.file)
        isGzip = false
        uploadFileSize = totalFileSize
      } else {
        // create a readable stream using a PassThrough stream that is both readable and writable
        const passThrough = new stream.PassThrough()
        passThrough.end(buffer)
        uploadStream = passThrough
        uploadFileSize = buffer.byteLength
      }

      const result = await this.uploadChunk(
        httpClientIndex,
        parameters.resourceUrl,
        uploadStream,
        0,
        uploadFileSize - 1,
        uploadFileSize,
        isGzip,
        totalFileSize
      )

      if (!result) {
        // chunk failed to upload
        isUploadSuccessful = false
        failedChunkSizes += uploadFileSize
        core.warning(`Aborting upload for ${parameters.file} due to failure`)
      }

      return {
        isSuccess: isUploadSuccessful,
        successfulUploadSize: uploadFileSize - failedChunkSizes,
        totalSize: totalFileSize
      }
    } else {
      // the file that is being uploaded is greater than 64k in size; a temporary file gets created on disk using the
      // npm tmp-promise package and this file gets used to create a GZipped file
      const tempFile = await tmp.file()

      // create a GZip file of the original file being uploaded, the original file should not be modified in any way
      uploadFileSize = await createGZipFileOnDisk(
        parameters.file,
        tempFile.path
      )

      let uploadFilePath = tempFile.path

      // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
      if (totalFileSize < uploadFileSize) {
        uploadFileSize = totalFileSize
        uploadFilePath = parameters.file
        isGzip = false
      }

      let abortFileUpload = false
      // upload only a single chunk at a time
      while (offset < uploadFileSize) {
        const chunkSize = Math.min(
          uploadFileSize - offset,
          parameters.maxChunkSize
        )

        // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
        if (uploadFileSize > 104857600) {
          this.statusReporter.updateLargeFileStatus(
            parameters.file,
            offset,
            uploadFileSize
          )
        }

        const start = offset
        const end = offset + chunkSize - 1
        offset += parameters.maxChunkSize

        if (abortFileUpload) {
          // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
          failedChunkSizes += chunkSize
          continue
        }

        const result = await this.uploadChunk(
          httpClientIndex,
          parameters.resourceUrl,
          fs.createReadStream(uploadFilePath, {
            start,
            end,
            autoClose: false
          }),
          start,
          end,
          uploadFileSize,
          isGzip,
          totalFileSize
        )

        if (!result) {
          // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
          // successfully uploaded so the server may report a different size for what was uploaded
          isUploadSuccessful = false
          failedChunkSizes += chunkSize
          core.warning(`Aborting upload for ${parameters.file} due to failure`)
          abortFileUpload = true
        }
      }

      // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
      // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
      await tempFile.cleanup()

      return {
        isSuccess: isUploadSuccessful,
        successfulUploadSize: uploadFileSize - failedChunkSizes,
        totalSize: totalFileSize
      }
    }
  }

  /**
   * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
   * indicates a retryable status, the chunk upload is retried
   * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
   * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
   * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
   * @param {number} start Starting byte index of file that the chunk belongs to
   * @param {number} end Ending byte index of file that the chunk belongs to
   * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
   * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
   * @param {number} totalFileSize Original total size of the file that is being uploaded
   * @returns whether the chunk was successfully uploaded
   */
  private async uploadChunk(
    httpClientIndex: number,
    resourceUrl: string,
    data: NodeJS.ReadableStream,
    start: number,
    end: number,
    uploadFileSize: number,
    isGzip: boolean,
    totalFileSize: number
  ): Promise<boolean> {
    // prepare all the necessary headers before making any http call
    const requestOptions = getUploadRequestOptions(
      'application/octet-stream',
      true,
      isGzip,
      totalFileSize,
      end - start + 1,
      getContentRange(start, end, uploadFileSize)
    )

    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
      const client = this.uploadHttpManager.getClient(httpClientIndex)
      return await client.sendStream('PUT', resourceUrl, data, requestOptions)
    }

    let retryCount = 0
    const retryLimit = getRetryLimit()

    // Increments the current retry count and then checks if the retry limit has been reached
    // If there have been too many retries, fail so the upload stops
    const incrementAndCheckRetryLimit = (
      response?: IHttpClientResponse
    ): boolean => {
      retryCount++
      if (retryCount > retryLimit) {
        if (response) {
          displayHttpDiagnostics(response)
        }
        core.info(
          `Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`
        )
        return true
      }
      return false
    }

    const backOff = async (retryAfterValue?: number): Promise<void> => {
      this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex)
      if (retryAfterValue) {
        core.info(
          `Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`
        )
        await new Promise(resolve => setTimeout(resolve, retryAfterValue))
      } else {
        const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount)
        core.info(
          `Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`
        )
        await new Promise(resolve => setTimeout(resolve, backoffTime))
      }
      core.info(
        `Finished backoff for retry #${retryCount}, continuing with upload`
      )
      return
    }

    // allow for failed chunks to be retried multiple times
    while (retryCount <= retryLimit) {
      let response: IHttpClientResponse

      try {
        response = await uploadChunkRequest()
      } catch (error) {
        // if an error is caught, it is usually indicative of a timeout so retry the upload
        core.info(
          `An error has been caught by http-client index ${httpClientIndex}, retrying the upload`
        )
        // eslint-disable-next-line no-console
        console.log(error)

        if (incrementAndCheckRetryLimit()) {
          return false
        }
        await backOff()
        continue
      }

      // Always read the body of the response. There is potential for a resource leak if the body is not read, which will
      // result in the connection remaining open along with unintended consequences when trying to dispose of the client
      await response.readBody()

      if (isSuccessStatusCode(response.message.statusCode)) {
        return true
      } else if (isRetryableStatusCode(response.message.statusCode)) {
        core.info(
          `A ${response.message.statusCode} status code has been received, will attempt to retry the upload`
        )
        if (incrementAndCheckRetryLimit(response)) {
          return false
        }
        isThrottledStatusCode(response.message.statusCode)
          ? await backOff(
              tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)
            )
          : await backOff()
      } else {
        core.error(
          `Unexpected response. Unable to upload chunk to ${resourceUrl}`
        )
        displayHttpDiagnostics(response)
        return false
      }
    }
    return false
  }

  /**
   * Updates the size of the artifact from -1, which was initially set when the container was first created for the artifact.
   * Updating the size indicates that we are done uploading all the contents of the artifact
   */
  async patchArtifactSize(size: number, artifactName: string): Promise<void> {
    const requestOptions = getUploadRequestOptions('application/json', false)
    const resourceUrl = new URL(getArtifactUrl())
    resourceUrl.searchParams.append('artifactName', artifactName)

    const parameters: PatchArtifactSize = {Size: size}
    const data: string = JSON.stringify(parameters, null, 2)
    core.debug(`URL is ${resourceUrl.toString()}`)

    // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
    const client = this.uploadHttpManager.getClient(0)
    const response: HttpClientResponse = await client.patch(
      resourceUrl.toString(),
      data,
      requestOptions
    )
    const body: string = await response.readBody()
    if (isSuccessStatusCode(response.message.statusCode)) {
      core.debug(
        `Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`
      )
    } else if (response.message.statusCode === 404) {
      throw new Error(`An Artifact with the name ${artifactName} was not found`)
    } else {
      displayHttpDiagnostics(response)
      core.info(body)
      throw new Error(
        `Unable to finish uploading artifact ${artifactName} to ${resourceUrl}`
      )
    }
  }
}

interface UploadFileParameters {
  file: string
  resourceUrl: string
  maxChunkSize: number
  continueOnError: boolean
}

interface UploadFileResult {
  isSuccess: boolean
  successfulUploadSize: number
  totalSize: number
}
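A hedged end-to-end sketch of how these pieces fit together. Error handling is omitted, the file path is hypothetical, getUploadSpecification is assumed to be imported from ./upload-specification, and the container URL field is assumed to be exposed on the ArtifactResponse as fileContainerResourceUrl per ./contracts:

async function uploadExample(): Promise<void> {
  const client = new UploadHttpClient()
  // describe which local files make up the artifact
  const spec = getUploadSpecification('my-artifact', '/tmp/files', [
    '/tmp/files/a.txt'
  ])
  // create the remote container, upload every file in chunks, then record the
  // final size to mark the artifact as complete
  const container = await client.createArtifactInFileContainer('my-artifact')
  const results = await client.uploadArtifactToFileContainer(
    container.fileContainerResourceUrl,
    spec,
    {continueOnError: false}
  )
  await client.patchArtifactSize(results.totalSize, 'my-artifact')
}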
@ -0,0 +1,18 @@
export interface UploadOptions {
  /**
   * Indicates if the artifact upload should continue if a file or chunk fails to upload from any error.
   * If there is an error during upload, a partial artifact will always be associated and available for
   * download at the end. The size reported will be the amount of storage that the user or org will be
   * charged for the partial artifact. Defaults to true if not specified
   *
   * If set to false and an error is encountered, all other uploads will stop and any files or chunks
   * that were queued will not be attempted. The partial artifact available will only
   * include files and chunks up until the failure
   *
   * If set to true and an error is encountered, the failed file will be skipped and ignored and all
   * other queued files will be attempted. The partial artifact at the end will have all
   * files with the exception of the problematic file(s)/chunk(s) that failed to upload
   *
   */
  continueOnError?: boolean
}
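For example, a caller that prefers to fail fast rather than skip problematic files would pass (illustrative):

const options: UploadOptions = {continueOnError: false}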
@ -0,0 +1,22 @@
export interface UploadResponse {
  /**
   * The name of the artifact that was uploaded
   */
  artifactName: string

  /**
   * A list of all items that are meant to be uploaded as part of the artifact
   */
  artifactItems: string[]

  /**
   * Total size of the artifact in bytes that was uploaded
   */
  size: number

  /**
   * A list of items that were not uploaded as part of the artifact (includes queued items that were not uploaded if
   * continueOnError is set to false). This is a subset of artifactItems.
   */
  failedItems: string[]
}
@ -0,0 +1,95 @@
import * as fs from 'fs'
import {debug} from '@actions/core'
import {join, normalize, resolve} from 'path'
import {checkArtifactName, checkArtifactFilePath} from './utils'

export interface UploadSpecification {
  absoluteFilePath: string
  uploadFilePath: string
}

/**
 * Creates a specification that describes how each file that is part of the artifact will be uploaded
 * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
 * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
 * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
 */
export function getUploadSpecification(
  artifactName: string,
  rootDirectory: string,
  artifactFiles: string[]
): UploadSpecification[] {
  checkArtifactName(artifactName)

  const specifications: UploadSpecification[] = []

  if (!fs.existsSync(rootDirectory)) {
    throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`)
  }
  if (!fs.lstatSync(rootDirectory).isDirectory()) {
    throw new Error(
      `Provided rootDirectory ${rootDirectory} is not a valid directory`
    )
  }
  // Normalize and resolve, this allows for either absolute or relative paths to be used
  rootDirectory = normalize(rootDirectory)
  rootDirectory = resolve(rootDirectory)

  /*
    Example to demonstrate behavior

    Input:
      artifactName: my-artifact
      rootDirectory: '/home/user/files/plz-upload'
      artifactFiles: [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ]

    Output:
      specifications: [
        ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'],
        ['/home/user/files/plz-upload/file2.txt', 'my-artifact/file2.txt'],
        ['/home/user/files/plz-upload/dir/file3.txt', 'my-artifact/dir/file3.txt']
      ]
  */
  for (let file of artifactFiles) {
    if (!fs.existsSync(file)) {
      throw new Error(`File ${file} does not exist`)
    }
    if (!fs.lstatSync(file).isDirectory()) {
      // Normalize and resolve, this allows for either absolute or relative paths to be used
      file = normalize(file)
      file = resolve(file)
      if (!file.startsWith(rootDirectory)) {
        throw new Error(
          `The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`
        )
      }

      // Check for forbidden characters in file paths that will be rejected during upload
      const uploadPath = file.replace(rootDirectory, '')
      checkArtifactFilePath(uploadPath)

      /*
        uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
        be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts

        path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt'
          join('artifact-name/', 'file-to-upload.txt')
          join('artifact-name/', '/file-to-upload.txt')
          join('artifact-name', 'file-to-upload.txt')
          join('artifact-name', '/file-to-upload.txt')
      */
      specifications.push({
        absoluteFilePath: file,
        uploadFilePath: join(artifactName, uploadPath)
      })
    } else {
      // Directories are rejected by the server during upload
      debug(`Removing ${file} from rawSearchResults because it is a directory`)
    }
  }
  return specifications
}
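An illustrative call matching the worked example in the comment above (it assumes the listed files actually exist on disk, since the function stats each one):

const specs = getUploadSpecification(
  'my-artifact',
  '/home/user/files/plz-upload',
  [
    '/home/user/files/plz-upload/file1.txt',
    '/home/user/files/plz-upload/dir/file3.txt'
  ]
)
// specs[0].uploadFilePath === 'my-artifact/file1.txt'
// specs[1].uploadFilePath === 'my-artifact/dir/file3.txt'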
@ -0,0 +1,305 @@
import {debug, info} from '@actions/core'
import {promises as fs} from 'fs'
import {HttpCodes, HttpClient} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/auth'
import {IHeaders, IHttpClientResponse} from '@actions/http-client/interfaces'
import {IncomingHttpHeaders} from 'http'
import {
  getRuntimeToken,
  getRuntimeUrl,
  getWorkFlowRunId,
  getRetryMultiplier,
  getInitialRetryIntervalInMilliseconds
} from './config-variables'

/**
 * Returns a retry time in milliseconds that exponentially gets larger
 * depending on the amount of retries that have been attempted
 */
export function getExponentialRetryTimeInMilliseconds(
  retryCount: number
): number {
  if (retryCount < 0) {
    throw new Error('RetryCount should not be negative')
  } else if (retryCount === 0) {
    return getInitialRetryIntervalInMilliseconds()
  }

  const minTime =
    getInitialRetryIntervalInMilliseconds() * getRetryMultiplier() * retryCount
  const maxTime = minTime * getRetryMultiplier()

  // returns a random number between the minTime (inclusive) and the maxTime (exclusive)
  return Math.random() * (maxTime - minTime) + minTime
}

/**
 * Parses an env variable that is a number
 */
export function parseEnvNumber(key: string): number | undefined {
  const value = Number(process.env[key])
  if (Number.isNaN(value) || value < 0) {
    return undefined
  }
  return value
}

/**
 * Various utility functions to help with the necessary API calls
 */
export function getApiVersion(): string {
  return '6.0-preview'
}

export function isSuccessStatusCode(statusCode?: number): boolean {
  if (!statusCode) {
    return false
  }
  return statusCode >= 200 && statusCode < 300
}

export function isForbiddenStatusCode(statusCode?: number): boolean {
  if (!statusCode) {
    return false
  }

  return statusCode === HttpCodes.Forbidden
}

export function isRetryableStatusCode(statusCode?: number): boolean {
  if (!statusCode) {
    return false
  }

  const retryableStatusCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout,
    HttpCodes.TooManyRequests
  ]
  return retryableStatusCodes.includes(statusCode)
}

export function isThrottledStatusCode(statusCode?: number): boolean {
  if (!statusCode) {
    return false
  }

  return statusCode === HttpCodes.TooManyRequests
}

/**
 * Attempts to get the retry-after value from a set of http headers. The retry time
 * is originally denoted in seconds, so if present, it is converted to milliseconds
 * @param headers all the headers received when making an http call
 */
export function tryGetRetryAfterValueTimeInMilliseconds(
  headers: IncomingHttpHeaders
): number | undefined {
  if (headers['retry-after']) {
    const retryTime = Number(headers['retry-after'])
    if (!isNaN(retryTime)) {
      info(`Retry-After header is present with a value of ${retryTime}`)
      return retryTime * 1000
    }
    info(
      `Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`
    )
    return undefined
  }
  info(
    `No retry-after header was found. Dumping all headers for diagnostic purposes`
  )
  // eslint-disable-next-line no-console
  console.log(headers)
  return undefined
}

export function getContentRange(
  start: number,
  end: number,
  total: number
): string {
  // Format: `bytes start-end/fileSize`
  // start and end are inclusive
  // For a 200 byte chunk starting at byte 0:
  // Content-Range: bytes 0-199/200
  return `bytes ${start}-${end}/${total}`
}

/**
 * Sets all the necessary headers when downloading an artifact
 * @param {string} contentType the type of content being sent
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} acceptGzip can we accept a gzip encoded response
 * @returns appropriate request options to make a specific http call during artifact download
 */
export function getDownloadRequestOptions(
  contentType: string,
  isKeepAlive?: boolean,
  acceptGzip?: boolean
): IHeaders {
  const requestOptions: IHeaders = {}

  if (contentType) {
    requestOptions['Content-Type'] = contentType
  }
  if (isKeepAlive) {
    requestOptions['Connection'] = 'Keep-Alive'
    // keep alive for at least 10 seconds before closing the connection
    requestOptions['Keep-Alive'] = '10'
  }
  if (acceptGzip) {
    // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header
    requestOptions['Accept-Encoding'] = 'gzip'
    requestOptions[
      'Accept'
    ] = `application/octet-stream;api-version=${getApiVersion()}`
  } else {
    // default to application/json if we are not working with gzip content
    requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`
  }

  return requestOptions
}

/**
 * Sets all the necessary headers when uploading an artifact
 * @param {string} contentType the type of content being uploaded
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} isGzip is the connection being used to upload GZip compressed content
 * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
 * @param {number} contentLength the length of the content that is being uploaded
 * @param {string} contentRange the range of the content that is being uploaded
 * @returns appropriate request options to make a specific http call during artifact upload
 */
export function getUploadRequestOptions(
  contentType: string,
  isKeepAlive?: boolean,
  isGzip?: boolean,
  uncompressedLength?: number,
  contentLength?: number,
  contentRange?: string
): IHeaders {
  const requestOptions: IHeaders = {}
  requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`
  if (contentType) {
    requestOptions['Content-Type'] = contentType
  }
  if (isKeepAlive) {
    requestOptions['Connection'] = 'Keep-Alive'
    // keep alive for at least 10 seconds before closing the connection
    requestOptions['Keep-Alive'] = '10'
  }
  if (isGzip) {
    requestOptions['Content-Encoding'] = 'gzip'
    requestOptions['x-tfs-filelength'] = uncompressedLength
  }
  if (contentLength) {
    requestOptions['Content-Length'] = contentLength
  }
  if (contentRange) {
    requestOptions['Content-Range'] = contentRange
  }

  return requestOptions
}

export function createHttpClient(): HttpClient {
  return new HttpClient('action/artifact', [
    new BearerCredentialHandler(getRuntimeToken())
  ])
}

export function getArtifactUrl(): string {
  const artifactUrl = `${getRuntimeUrl()}_apis/pipelines/workflows/${getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`
  debug(`Artifact Url: ${artifactUrl}`)
  return artifactUrl
}

/**
 * Uh oh! Something might have gone wrong during either upload or download. The IHttpClientResponse object contains information
 * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but
 * the entire object is really big and most of the information is not really useful. This function takes the response object and displays only
 * the information that we want.
 *
 * Certain information, such as the TLSSocket and the Readable state, is not really useful for diagnostic purposes so it can be avoided.
 * Other information, such as the headers, the response code and the message, might be useful, so this is displayed.
 */
export function displayHttpDiagnostics(response: IHttpClientResponse): void {
  info(
    `##### Begin Diagnostic HTTP information #####
Status Code: ${response.message.statusCode}
Status Message: ${response.message.statusMessage}
Header Information: ${JSON.stringify(response.message.headers, undefined, 2)}
###### End Diagnostic HTTP information ######`
  )
}

/**
 * Invalid characters that cannot be in the artifact name or an uploaded file. These will be rejected
 * by the server if attempted to be sent over. These characters are not allowed due to limitations with certain
 * file systems such as NTFS. To maintain platform-agnostic behavior, characters that are not supported by an
 * individual filesystem/platform are not supported on any filesystem/platform
 *
 * File paths can include characters such as \ and / which are not permitted in the artifact name alone
 */
const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?']
const invalidArtifactNameCharacters = [
  ...invalidArtifactFilePathCharacters,
  '\\',
  '/'
]

/**
 * Scans the name of the artifact to make sure there are no illegal characters
 */
export function checkArtifactName(name: string): void {
  if (!name) {
    throw new Error(`Artifact name: ${name}, is incorrectly provided`)
  }

  for (const invalidChar of invalidArtifactNameCharacters) {
    if (name.includes(invalidChar)) {
      throw new Error(
        `Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`
      )
    }
  }
}

/**
 * Scans the name of the filePath used to make sure there are no illegal characters
 */
export function checkArtifactFilePath(path: string): void {
  if (!path) {
    throw new Error(`Artifact path: ${path}, is incorrectly provided`)
  }

  for (const invalidChar of invalidArtifactFilePathCharacters) {
    if (path.includes(invalidChar)) {
      throw new Error(
        `Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`
      )
    }
  }
}

export async function createDirectoriesForArtifact(
  directories: string[]
): Promise<void> {
  for (const directory of directories) {
    await fs.mkdir(directory, {
      recursive: true
    })
  }
}

export async function createEmptyFilesForArtifact(
  emptyFilesToCreate: string[]
): Promise<void> {
  for (const filePath of emptyFilesToCreate) {
    await (await fs.open(filePath, 'w')).close()
  }
}
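A worked example of the backoff window computed above: retry n is drawn uniformly from [I * M * n, I * M * n * M), where I is the initial interval and M the multiplier. Taking I = 3000 ms and M = 1.5 purely for illustration (the real values come from the config variables): retry 1 falls in [4500, 6750) ms, retry 2 in [9000, 13500) ms, and retry 3 in [13500, 20250) ms.

// log a sample draw for the first three retries (each value is random within its window)
for (let retry = 1; retry <= 3; retry++) {
  debug(
    `retry #${retry}: ~${Math.round(
      getExponentialRetryTimeInMilliseconds(retry)
    )} ms`
  )
}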
@ -0,0 +1,11 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "baseUrl": "./",
    "outDir": "./lib",
    "rootDir": "./src"
  },
  "include": [
    "./src"
  ]
}
@ -82,6 +82,12 @@ try {
    core.warning('myInput was not set');
  }

  if (core.isDebug()) {
    // curl -v https://github.com
  } else {
    // curl https://github.com
  }

  // Do stuff
}
catch (err) {
@ -1,5 +1,22 @@
# @actions/core Releases

### 1.2.4

- [Be more lenient in accepting non-string command inputs](https://github.com/actions/toolkit/pull/405)
- [Add Echo commands](https://github.com/actions/toolkit/pull/411)

### 1.2.3

- [IsDebug logging](README.md#logging)

### 1.2.2

- [Fix escaping for runner commands](https://github.com/actions/toolkit/pull/302)

### 1.2.1

- [Remove trailing comma from commands](https://github.com/actions/toolkit/pull/263)
- [Add "types" to package.json](https://github.com/actions/toolkit/pull/221)

### 1.2.0

- saveState and getState functions for wrapper tasks (on finally entry points that run post job)
@ -0,0 +1,139 @@
import * as command from '../src/command'
import * as os from 'os'

/* eslint-disable @typescript-eslint/unbound-method */

let originalWriteFunction: (str: string) => boolean

describe('@actions/core/src/command', () => {
  beforeAll(() => {
    originalWriteFunction = process.stdout.write
  })

  beforeEach(() => {
    process.stdout.write = jest.fn()
  })

  afterEach(() => {})

  afterAll(() => {
    process.stdout.write = (originalWriteFunction as unknown) as (
      str: string
    ) => boolean
  })

  it('command only', () => {
    command.issueCommand('some-command', {}, '')
    assertWriteCalls([`::some-command::${os.EOL}`])
  })

  it('command escapes message', () => {
    // Verify replaces each instance, not just first instance
    command.issueCommand(
      'some-command',
      {},
      'percent % percent % cr \r cr \r lf \n lf \n'
    )
    assertWriteCalls([
      `::some-command::percent %25 percent %25 cr %0D cr %0D lf %0A lf %0A${os.EOL}`
    ])

    // Verify literal escape sequences
    process.stdout.write = jest.fn()
    command.issueCommand('some-command', {}, '%25 %25 %0D %0D %0A %0A')
    assertWriteCalls([
      `::some-command::%2525 %2525 %250D %250D %250A %250A${os.EOL}`
    ])
  })

  it('command escapes property', () => {
    // Verify replaces each instance, not just first instance
    command.issueCommand(
      'some-command',
      {
        name:
          'percent % percent % cr \r cr \r lf \n lf \n colon : colon : comma , comma ,'
      },
      ''
    )
    assertWriteCalls([
      `::some-command name=percent %25 percent %25 cr %0D cr %0D lf %0A lf %0A colon %3A colon %3A comma %2C comma %2C::${os.EOL}`
    ])

    // Verify literal escape sequences
    process.stdout.write = jest.fn()
    command.issueCommand(
      'some-command',
      {},
      '%25 %25 %0D %0D %0A %0A %3A %3A %2C %2C'
    )
    assertWriteCalls([
      `::some-command::%2525 %2525 %250D %250D %250A %250A %253A %253A %252C %252C${os.EOL}`
    ])
  })

  it('command with message', () => {
    command.issueCommand('some-command', {}, 'some message')
    assertWriteCalls([`::some-command::some message${os.EOL}`])
  })

  it('command with message and properties', () => {
    command.issueCommand(
      'some-command',
      {prop1: 'value 1', prop2: 'value 2'},
      'some message'
    )
    assertWriteCalls([
      `::some-command prop1=value 1,prop2=value 2::some message${os.EOL}`
    ])
  })

  it('command with one property', () => {
    command.issueCommand('some-command', {prop1: 'value 1'}, '')
    assertWriteCalls([`::some-command prop1=value 1::${os.EOL}`])
  })

  it('command with two properties', () => {
    command.issueCommand(
      'some-command',
      {prop1: 'value 1', prop2: 'value 2'},
      ''
    )
    assertWriteCalls([`::some-command prop1=value 1,prop2=value 2::${os.EOL}`])
  })

  it('command with three properties', () => {
    command.issueCommand(
      'some-command',
      {prop1: 'value 1', prop2: 'value 2', prop3: 'value 3'},
      ''
    )
    assertWriteCalls([
      `::some-command prop1=value 1,prop2=value 2,prop3=value 3::${os.EOL}`
    ])
  })

  it('should handle issuing commands for non-string objects', () => {
    command.issueCommand(
      'some-command',
      {
        prop1: ({test: 'object'} as unknown) as string,
        prop2: (123 as unknown) as string,
        prop3: (true as unknown) as string
      },
      ({test: 'object'} as unknown) as string
    )
    assertWriteCalls([
      `::some-command prop1={"test"%3A"object"},prop2=123,prop3=true::{"test":"object"}${os.EOL}`
    ])
  })
})

// Assert that process.stdout.write was called only with the given arguments.
function assertWriteCalls(calls: string[]): void {
  expect(process.stdout.write).toHaveBeenCalledTimes(calls.length)

  for (let i = 0; i < calls.length; i++) {
    expect(process.stdout.write).toHaveBeenNthCalledWith(i + 1, calls[i])
  }
}
@ -37,21 +37,31 @@ describe('@actions/core', () => {

  it('exportVariable produces the correct command and sets the env', () => {
    core.exportVariable('my var', 'var val')
    assertWriteCalls([`::set-env name=my var,::var val${os.EOL}`])
    assertWriteCalls([`::set-env name=my var::var val${os.EOL}`])
  })

  it('exportVariable escapes variable names', () => {
    core.exportVariable('special char var \r\n];', 'special val')
    expect(process.env['special char var \r\n];']).toBe('special val')
    core.exportVariable('special char var \r\n,:', 'special val')
    expect(process.env['special char var \r\n,:']).toBe('special val')
    assertWriteCalls([
      `::set-env name=special char var %0D%0A%5D%3B,::special val${os.EOL}`
      `::set-env name=special char var %0D%0A%2C%3A::special val${os.EOL}`
    ])
  })

  it('exportVariable escapes variable values', () => {
    core.exportVariable('my var2', 'var val\r\n')
    expect(process.env['my var2']).toBe('var val\r\n')
    assertWriteCalls([`::set-env name=my var2,::var val%0D%0A${os.EOL}`])
    assertWriteCalls([`::set-env name=my var2::var val%0D%0A${os.EOL}`])
  })

  it('exportVariable handles boolean inputs', () => {
    core.exportVariable('my var', true)
    assertWriteCalls([`::set-env name=my var::true${os.EOL}`])
  })

  it('exportVariable handles number inputs', () => {
    core.exportVariable('my var', 5)
    assertWriteCalls([`::set-env name=my var::5${os.EOL}`])
  })

  it('setSecret produces the correct command', () => {
@ -101,21 +111,38 @@ describe('@actions/core', () => {

  it('setOutput produces the correct command', () => {
    core.setOutput('some output', 'some value')
    assertWriteCalls([`::set-output name=some output,::some value${os.EOL}`])
    assertWriteCalls([`::set-output name=some output::some value${os.EOL}`])
  })

  it('setFailure sets the correct exit code and failure message', () => {
  it('setOutput handles bools', () => {
    core.setOutput('some output', false)
    assertWriteCalls([`::set-output name=some output::false${os.EOL}`])
  })

  it('setOutput handles numbers', () => {
    core.setOutput('some output', 1.01)
    assertWriteCalls([`::set-output name=some output::1.01${os.EOL}`])
  })

  it('setFailed sets the correct exit code and failure message', () => {
    core.setFailed('Failure message')
    expect(process.exitCode).toBe(core.ExitCode.Failure)
    assertWriteCalls([`::error::Failure message${os.EOL}`])
  })

  it('setFailure escapes the failure message', () => {
  it('setFailed escapes the failure message', () => {
    core.setFailed('Failure \r\n\nmessage\r')
    expect(process.exitCode).toBe(core.ExitCode.Failure)
    assertWriteCalls([`::error::Failure %0D%0A%0Amessage%0D${os.EOL}`])
  })

  it('setFailed handles Error', () => {
    const message = 'this is my error message'
    core.setFailed(new Error(message))
    expect(process.exitCode).toBe(core.ExitCode.Failure)
    assertWriteCalls([`::error::Error: ${message}${os.EOL}`])
  })

  it('error sets the correct error message', () => {
    core.error('Error message')
    assertWriteCalls([`::error::Error message${os.EOL}`])
@ -126,6 +153,12 @@ describe('@actions/core', () => {
|
|||
assertWriteCalls([`::error::Error message%0D%0A%0A${os.EOL}`])
|
||||
})
|
||||
|
||||
it('error handles an error object', () => {
|
||||
const message = 'this is my error message'
|
||||
core.error(new Error(message))
|
||||
assertWriteCalls([`::error::Error: ${message}${os.EOL}`])
|
||||
})
|
||||
|
||||
it('warning sets the correct message', () => {
|
||||
core.warning('Warning')
|
||||
assertWriteCalls([`::warning::Warning${os.EOL}`])
|
||||
|
@ -136,6 +169,12 @@ describe('@actions/core', () => {
|
|||
assertWriteCalls([`::warning::%0D%0Awarning%0A${os.EOL}`])
|
||||
})
|
||||
|
||||
it('warning handles an error object', () => {
|
||||
const message = 'this is my error message'
|
||||
core.warning(new Error(message))
|
||||
assertWriteCalls([`::warning::Error: ${message}${os.EOL}`])
|
||||
})
|
||||
|
||||
it('startGroup starts a new group', () => {
|
||||
core.startGroup('my-group')
|
||||
assertWriteCalls([`::group::my-group${os.EOL}`])
|
||||
|
@ -171,12 +210,45 @@ describe('@actions/core', () => {
|
|||
|
||||
it('saveState produces the correct command', () => {
|
||||
core.saveState('state_1', 'some value')
|
||||
assertWriteCalls([`::save-state name=state_1,::some value${os.EOL}`])
|
||||
assertWriteCalls([`::save-state name=state_1::some value${os.EOL}`])
|
||||
})
|
||||
|
||||
it('saveState handles numbers', () => {
|
||||
core.saveState('state_1', 1)
|
||||
assertWriteCalls([`::save-state name=state_1::1${os.EOL}`])
|
||||
})
|
||||
|
||||
it('saveState handles bools', () => {
|
||||
core.saveState('state_1', true)
|
||||
assertWriteCalls([`::save-state name=state_1::true${os.EOL}`])
|
||||
})
|
||||
|
||||
it('getState gets wrapper action state', () => {
|
||||
expect(core.getState('TEST_1')).toBe('state_val')
|
||||
})
|
||||
|
||||
it('isDebug check debug state', () => {
|
||||
const current = process.env['RUNNER_DEBUG']
|
||||
try {
|
||||
delete process.env.RUNNER_DEBUG
|
||||
expect(core.isDebug()).toBe(false)
|
||||
|
||||
process.env['RUNNER_DEBUG'] = '1'
|
||||
expect(core.isDebug()).toBe(true)
|
||||
} finally {
|
||||
process.env['RUNNER_DEBUG'] = current
|
||||
}
|
||||
})
|
||||
|
||||
it('setCommandEcho can enable echoing', () => {
|
||||
core.setCommandEcho(true)
|
||||
assertWriteCalls([`::echo::on${os.EOL}`])
|
||||
})
|
||||
|
||||
it('setCommandEcho can disable echoing', () => {
|
||||
core.setCommandEcho(false)
|
||||
assertWriteCalls([`::echo::off${os.EOL}`])
|
||||
})
|
||||
})
|
||||
|
||||
// Assert that process.stdout.write calls called only with the given arguments.
|
|
@@ -1,6 +1,6 @@
{
  "name": "@actions/core",
- "version": "1.1.1",
+ "version": "1.2.4",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
@@ -1,6 +1,6 @@
{
  "name": "@actions/core",
- "version": "1.2.0",
+ "version": "1.2.4",
  "description": "Actions core lib",
  "keywords": [
    "github",

@@ -10,6 +10,7 @@
  "homepage": "https://github.com/actions/toolkit/tree/master/packages/core",
  "license": "MIT",
  "main": "lib/core.js",
+ "types": "lib/core.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"

@@ -26,6 +27,7 @@
    "directory": "packages/core"
  },
  "scripts": {
+   "audit-moderate": "npm install && npm audit --audit-level=moderate",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
@@ -2,24 +2,27 @@ import * as os from 'os'

// For internal use, subject to change.

+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+
interface CommandProperties {
- [key: string]: string
+ [key: string]: any
}

/**
 * Commands
 *
 * Command Format:
-*   ##[name key=value;key=value]message
+*   ::name key=value,key=value::message
 *
 * Examples:
-*   ##[warning]This is the user warning message
-*   ##[set-secret name=mypassword]definitelyNotAPassword!
+*   ::warning::This is the message
+*   ::set-env name=MY_VAR::some value
 */
export function issueCommand(
  command: string,
  properties: CommandProperties,
- message: string
+ message: any
): void {
  const cmd = new Command(command, properties, message)
  process.stdout.write(cmd.toString() + os.EOL)

@@ -51,37 +54,53 @@ class Command {
    if (this.properties && Object.keys(this.properties).length > 0) {
      cmdStr += ' '
+     let first = true
      for (const key in this.properties) {
        if (this.properties.hasOwnProperty(key)) {
          const val = this.properties[key]
          if (val) {
-           // safely append the val - avoid blowing up when attempting to
-           // call .replace() if message is not a string for some reason
-           cmdStr += `${key}=${escape(`${val || ''}`)},`
+           if (first) {
+             first = false
+           } else {
+             cmdStr += ','
+           }
+
+           cmdStr += `${key}=${escapeProperty(val)}`
          }
        }
      }
    }

-   cmdStr += CMD_STRING
-
-   // safely append the message - avoid blowing up when attempting to
-   // call .replace() if message is not a string for some reason
-   const message = `${this.message || ''}`
-   cmdStr += escapeData(message)
-
+   cmdStr += `${CMD_STRING}${escapeData(this.message)}`
    return cmdStr
  }
}

-function escapeData(s: string): string {
-  return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A')
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+export function toCommandValue(input: any): string {
+  if (input === null || input === undefined) {
+    return ''
+  } else if (typeof input === 'string' || input instanceof String) {
+    return input as string
+  }
+  return JSON.stringify(input)
}

-function escape(s: string): string {
-  return s
+function escapeData(s: any): string {
+  return toCommandValue(s)
    .replace(/%/g, '%25')
    .replace(/\r/g, '%0D')
    .replace(/\n/g, '%0A')
-   .replace(/]/g, '%5D')
-   .replace(/;/g, '%3B')
}
+
+function escapeProperty(s: any): string {
+  return toCommandValue(s)
+    .replace(/%/g, '%25')
+    .replace(/\r/g, '%0D')
+    .replace(/\n/g, '%0A')
+    .replace(/:/g, '%3A')
+    .replace(/,/g, '%2C')
+}
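As an aside (not part of the diff), a small sketch of what the new conversion does; the import path assumes the package's built `lib/` layout. Strings pass through unchanged before the escape functions percent-encode the reserved characters, and everything else is JSON-stringified:

```js
const {toCommandValue} = require('@actions/core/lib/command');

// Strings pass through unchanged; everything else is JSON-stringified.
toCommandValue('plain');    // 'plain'
toCommandValue(true);       // 'true'
toCommandValue({a: 1});     // '{"a":1}'
toCommandValue(undefined);  // ''
```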
@@ -1,4 +1,4 @@
-import {issue, issueCommand} from './command'
+import {issue, issueCommand, toCommandValue} from './command'

import * as os from 'os'
import * as path from 'path'

@@ -33,11 +33,13 @@ export enum ExitCode {
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
- * @param val the value of the variable
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
-export function exportVariable(name: string, val: string): void {
-  process.env[name] = val
-  issueCommand('set-env', {name}, val)
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export function exportVariable(name: string, val: any): void {
+  const convertedVal = toCommandValue(val)
+  process.env[name] = convertedVal
+  issueCommand('set-env', {name}, convertedVal)
}

/**

@@ -78,12 +80,22 @@ export function getInput(name: string, options?: InputOptions): string {
 * Sets the value of an output.
 *
 * @param name name of the output to set
- * @param value value to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
-export function setOutput(name: string, value: string): void {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export function setOutput(name: string, value: any): void {
  issueCommand('set-output', {name}, value)
}

+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+export function setCommandEcho(enabled: boolean): void {
+  issue('echo', enabled ? 'on' : 'off')
+}
+
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------

@@ -93,8 +105,9 @@ export function setOutput(name: string, value: string): void {
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
-export function setFailed(message: string): void {
+export function setFailed(message: string | Error): void {
  process.exitCode = ExitCode.Failure

  error(message)
}

@@ -102,6 +115,13 @@ export function setFailed(message: string): void {
// Logging Commands
//-----------------------------------------------------------------------

+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+export function isDebug(): boolean {
+  return process.env['RUNNER_DEBUG'] === '1'
+}
+
/**
 * Writes debug message to user log
 * @param message debug message

@@ -112,18 +132,18 @@ export function debug(message: string): void {
/**
 * Adds an error issue
- * @param message error issue message
+ * @param message error issue message. Errors will be converted to string via toString()
 */
-export function error(message: string): void {
-  issue('error', message)
+export function error(message: string | Error): void {
+  issue('error', message instanceof Error ? message.toString() : message)
}

/**
 * Adds a warning issue
- * @param message warning issue message
+ * @param message warning issue message. Errors will be converted to string via toString()
 */
-export function warning(message: string): void {
-  issue('warning', message)
+export function warning(message: string | Error): void {
+  issue('warning', message instanceof Error ? message.toString() : message)
}

/**

@@ -182,9 +202,10 @@ export async function group<T>(name: string, fn: () => Promise<T>): Promise<T> {
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
- * @param value value to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
-export function saveState(name: string, value: string): void {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export function saveState(name: string, value: any): void {
  issueCommand('save-state', {name}, value)
}
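Taken together, the widened signatures above can be exercised like this (an illustrative sketch; the variable names and values are made up):

```js
const core = require('@actions/core');

core.exportVariable('RETRY_CONFIG', {retries: 3}); // stored as '{"retries":3}'
core.setOutput('did-publish', true);               // booleans become 'true'/'false'
core.setCommandEcho(true);                         // echo subsequent commands to the log

if (core.isDebug()) {
  core.debug('runner has RUNNER_DEBUG=1');
}

core.setFailed(new Error('boom'));                 // Errors are accepted and stringified
```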
@@ -4,7 +4,7 @@

#### Basic

-You can use this package to execute your tools on the command line in a cross platform way:
+You can use this package to execute tools in a cross platform way:

```js
const exec = require('@actions/exec');

@@ -48,13 +48,10 @@ await exec.exec('node', ['index.js', 'foo=bar'], options);

#### Exec tools not in the PATH

-You can use it in conjunction with the `which` function from `@actions/io` to execute tools that are not in the PATH:
+You can specify the full path for tools not in the PATH:

```js
const exec = require('@actions/exec');
-const io = require('@actions/io');
-
-const pythonPath: string = await io.which('python', true)
-
-await exec.exec(`"${pythonPath}"`, ['main.py']);
+await exec.exec('"/path/to/my-tool"', ['arg1']);
```
@@ -1,5 +1,13 @@
# @actions/exec Releases

+### 1.0.3
+
+- [Add "types" to package.json](https://github.com/actions/toolkit/pull/221)
+
+### 1.0.2
+
+- [Which before invoking tool](https://github.com/actions/toolkit/pull/220)
+
### 1.0.0

- Initial release
@@ -53,7 +53,7 @@ describe('@actions/exec', () => {
      expect(outstream.write).toBeCalledWith(
        `[command]${toolpath} /c echo hello${os.EOL}`
      )
-     expect(outstream.write).toBeCalledWith(new Buffer(`hello${os.EOL}`))
+     expect(outstream.write).toBeCalledWith(Buffer.from(`hello${os.EOL}`))
    } else {
      expect(outstream.write).toBeCalledWith(
        `[command]${toolpath} -l -a${os.EOL}`

@@ -83,7 +83,7 @@ describe('@actions/exec', () => {
      expect(outstream.write).toBeCalledWith(
        `[command]${toolpath} /c echo hello${os.EOL}`
      )
-     expect(outstream.write).toBeCalledWith(new Buffer(`hello${os.EOL}`))
+     expect(outstream.write).toBeCalledWith(Buffer.from(`hello${os.EOL}`))
    } else {
      expect(outstream.write).toBeCalledWith(
        `[command]${toolpath} -l -a${os.EOL}`

@@ -113,7 +113,7 @@ describe('@actions/exec', () => {
      expect(outstream.write).toBeCalledWith(
        `[command]${toolpath} /c echo hello${os.EOL}`
      )
-     expect(outstream.write).toBeCalledWith(new Buffer(`hello${os.EOL}`))
+     expect(outstream.write).toBeCalledWith(Buffer.from(`hello${os.EOL}`))
    } else {
      expect(outstream.write).toBeCalledWith(
        `[command]${toolpath} -l -a${os.EOL}`

@@ -121,6 +121,38 @@ describe('@actions/exec', () => {
    }
  })

+ it('Runs exec successfully with command from PATH', async () => {
+   const execOptions = getExecOptions()
+   const outStream = new StringStream()
+   execOptions.outStream = outStream
+   let output = ''
+   execOptions.listeners = {
+     stdout: (data: Buffer) => {
+       output += data.toString()
+     }
+   }
+
+   let exitCode = 1
+   let tool: string
+   let args: string[]
+   if (IS_WINDOWS) {
+     tool = 'cmd'
+     args = ['/c', 'echo', 'hello']
+   } else {
+     tool = 'sh'
+     args = ['-c', 'echo hello']
+   }
+
+   exitCode = await exec.exec(tool, args, execOptions)
+
+   expect(exitCode).toBe(0)
+   const rootedTool = await io.which(tool, true)
+   expect(outStream.getContents().split(os.EOL)[0]).toBe(
+     `[command]${rootedTool} ${args.join(' ')}`
+   )
+   expect(output.trim()).toBe(`hello`)
+ })
+
  it('Exec fails with error on bad call', async () => {
    const _testExecOptions = getExecOptions()

@@ -173,7 +205,7 @@ describe('@actions/exec', () => {
    expect(exitCode).toBe(0)
    expect(outstream.write).toBeCalledWith(
-     new Buffer('this is output to stderr')
+     Buffer.from('this is output to stderr')
    )
  })

@@ -197,7 +229,7 @@ describe('@actions/exec', () => {
    expect(failed).toBe(true)
    expect(errstream.write).toBeCalledWith(
-     new Buffer('this is output to stderr')
+     Buffer.from('this is output to stderr')
    )
  })

@@ -232,11 +264,11 @@ describe('@actions/exec', () => {
    const _testExecOptions = getExecOptions()
    _testExecOptions.listeners = {
      stdout: (data: Buffer) => {
-       expect(data).toEqual(new Buffer('this is output to stdout'))
+       expect(data).toEqual(Buffer.from('this is output to stdout'))
        stdoutCalled = true
      },
      stderr: (data: Buffer) => {
-       expect(data).toEqual(new Buffer('this is output to stderr'))
+       expect(data).toEqual(Buffer.from('this is output to stderr'))
        stderrCalled = true
      }
    }

@@ -254,9 +286,53 @@ describe('@actions/exec', () => {
    expect(stderrCalled).toBeTruthy()
  })

+ it('Handles stdin shell', async () => {
+   let command: string
+   if (IS_WINDOWS) {
+     command = 'wait-for-input.cmd'
+   } else {
+     command = 'wait-for-input.sh'
+   }
+
+   const waitForInput: string = path.join(__dirname, 'scripts', command)
+
+   const _testExecOptions = getExecOptions()
+
+   _testExecOptions.listeners = {
+     stdout: (data: Buffer) => {
+       expect(data).toEqual(Buffer.from(`this is my input${os.EOL}`))
+     }
+   }
+
+   _testExecOptions.input = Buffer.from('this is my input')
+
+   const exitCode = await exec.exec(`"${waitForInput}"`, [], _testExecOptions)
+   expect(exitCode).toBe(0)
+ })
+
+ it('Handles stdin js', async () => {
+   const waitForInput: string = path.join(
+     __dirname,
+     'scripts',
+     'wait-for-input.js'
+   )
+
+   const _testExecOptions = getExecOptions()
+
+   _testExecOptions.listeners = {
+     stdout: (data: Buffer) => {
+       expect(data).toEqual(Buffer.from(`this is my input`))
+     }
+   }
+
+   _testExecOptions.input = Buffer.from('this is my input')
+
+   const nodePath = await io.which('node', true)
+   const exitCode = await exec.exec(nodePath, [waitForInput], _testExecOptions)
+   expect(exitCode).toBe(0)
+ })
+
  it('Handles child process holding streams open', async function() {
-   // this was timing out on some slower hosted macOS runs at default 5s
-   jest.setTimeout(10000)
    const semaphorePath = path.join(
      getTestTemp(),
      'child-process-semaphore.txt'

@@ -300,11 +376,9 @@ describe('@actions/exec', () => {
    ).toBe(1)

    fs.unlinkSync(semaphorePath)
- })
+ }, 10000) // this was timing out on some slower hosted macOS runs at default 5s

  it('Handles child process holding streams open and non-zero exit code', async function() {
-   // this was timing out on some slower hosted macOS runs at default 5s
-   jest.setTimeout(10000)
    const semaphorePath = path.join(
      getTestTemp(),
      'child-process-semaphore.txt'

@@ -356,7 +430,7 @@ describe('@actions/exec', () => {
    ).toBe(1)

    fs.unlinkSync(semaphorePath)
- })
+ }, 10000) // this was timing out on some slower hosted macOS runs at default 5s

  it('Handles child process holding streams open and stderr', async function() {
    const semaphorePath = path.join(

@@ -418,7 +492,165 @@ describe('@actions/exec', () => {
    fs.unlinkSync(semaphorePath)
  })

+ it('Exec roots relative tool path using unrooted options.cwd', async () => {
+   let exitCode: number
+   let command: string
+   if (IS_WINDOWS) {
+     command = './print-args-cmd' // let ToolRunner resolve the extension
+   } else {
+     command = './print-args-sh.sh'
+   }
+   const execOptions = getExecOptions()
+   execOptions.cwd = 'scripts'
+   const outStream = new StringStream()
+   execOptions.outStream = outStream
+   let output = ''
+   execOptions.listeners = {
+     stdout: (data: Buffer) => {
+       output += data.toString()
+     }
+   }
+
+   const originalCwd = process.cwd()
+   try {
+     process.chdir(__dirname)
+     exitCode = await exec.exec(`${command} hello world`, [], execOptions)
+   } catch (err) {
+     process.chdir(originalCwd)
+     throw err
+   }
+
+   expect(exitCode).toBe(0)
+   const toolPath = path.resolve(
+     __dirname,
+     execOptions.cwd,
+     `${command}${IS_WINDOWS ? '.cmd' : ''}`
+   )
+   if (IS_WINDOWS) {
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${process.env.ComSpec} /D /S /C "${toolPath} hello world"`
+     )
+   } else {
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${toolPath} hello world`
+     )
+   }
+   expect(output.trim()).toBe(`args[0]: "hello"${os.EOL}args[1]: "world"`)
+ })
+
+ it('Exec roots relative tool path using rooted options.cwd', async () => {
+   let command: string
+   if (IS_WINDOWS) {
+     command = './print-args-cmd' // let ToolRunner resolve the extension
+   } else {
+     command = './print-args-sh.sh'
+   }
+   const execOptions = getExecOptions()
+   execOptions.cwd = path.join(__dirname, 'scripts')
+   const outStream = new StringStream()
+   execOptions.outStream = outStream
+   let output = ''
+   execOptions.listeners = {
+     stdout: (data: Buffer) => {
+       output += data.toString()
+     }
+   }
+
+   const exitCode = await exec.exec(`${command} hello world`, [], execOptions)
+
+   expect(exitCode).toBe(0)
+   const toolPath = path.resolve(
+     __dirname,
+     execOptions.cwd,
+     `${command}${IS_WINDOWS ? '.cmd' : ''}`
+   )
+   if (IS_WINDOWS) {
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${process.env.ComSpec} /D /S /C "${toolPath} hello world"`
+     )
+   } else {
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${toolPath} hello world`
+     )
+   }
+   expect(output.trim()).toBe(`args[0]: "hello"${os.EOL}args[1]: "world"`)
+ })
+
+ it('Exec roots relative tool path using process.cwd', async () => {
+   let exitCode: number
+   let command: string
+   if (IS_WINDOWS) {
+     command = 'scripts/print-args-cmd' // let ToolRunner resolve the extension
+   } else {
+     command = 'scripts/print-args-sh.sh'
+   }
+   const execOptions = getExecOptions()
+   const outStream = new StringStream()
+   execOptions.outStream = outStream
+   let output = ''
+   execOptions.listeners = {
+     stdout: (data: Buffer) => {
+       output += data.toString()
+     }
+   }
+
+   const originalCwd = process.cwd()
+   try {
+     process.chdir(__dirname)
+     exitCode = await exec.exec(`${command} hello world`, [], execOptions)
+   } catch (err) {
+     process.chdir(originalCwd)
+     throw err
+   }
+
+   expect(exitCode).toBe(0)
+   const toolPath = path.resolve(
+     __dirname,
+     `${command}${IS_WINDOWS ? '.cmd' : ''}`
+   )
+   if (IS_WINDOWS) {
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${process.env.ComSpec} /D /S /C "${toolPath} hello world"`
+     )
+   } else {
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${toolPath} hello world`
+     )
+   }
+   expect(output.trim()).toBe(`args[0]: "hello"${os.EOL}args[1]: "world"`)
+ })
+
  if (IS_WINDOWS) {
+   it('Exec roots relative tool path using process.cwd (Windows path separator)', async () => {
+     let exitCode: number
+     const command = 'scripts\\print-args-cmd' // let ToolRunner resolve the extension
+     const execOptions = getExecOptions()
+     const outStream = new StringStream()
+     execOptions.outStream = outStream
+     let output = ''
+     execOptions.listeners = {
+       stdout: (data: Buffer) => {
+         output += data.toString()
+       }
+     }
+
+     const originalCwd = process.cwd()
+     try {
+       process.chdir(__dirname)
+       exitCode = await exec.exec(`${command} hello world`, [], execOptions)
+     } catch (err) {
+       process.chdir(originalCwd)
+       throw err
+     }
+
+     expect(exitCode).toBe(0)
+     const toolPath = path.resolve(__dirname, `${command}.cmd`)
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${process.env.ComSpec} /D /S /C "${toolPath} hello world"`
+     )
+     expect(output.trim()).toBe(`args[0]: "hello"${os.EOL}args[1]: "world"`)
+   })
+
    // Win specific quoting tests
    it('execs .exe with verbatim args (Windows)', async () => {
      const exePath = process.env.ComSpec

@@ -505,7 +737,7 @@ describe('@actions/exec', () => {
      `[command]"${exePath}" myarg1 myarg2`
    )
    expect(output.trim()).toBe("args[0]: 'myarg1'\r\nargs[1]: 'myarg2'")
- })
+ }, 20000) // slower windows runs timeout, so upping timeout to 20s (from default of 5s)

  it('execs .cmd with a space and with verbatim args (Windows)', async () => {
    // this test validates the quoting that tool runner adds around the script path.

@@ -562,9 +794,7 @@ describe('@actions/exec', () => {
    const exitCode = await exec.exec(`"${cmdPath}"`, args, options)
    expect(exitCode).toBe(0)
    expect(outStream.getContents().split(os.EOL)[0]).toBe(
-     `[command]${
-       process.env.ComSpec
-     } /D /S /C ""${cmdPath}" "my arg 1" "my arg 2""`
+     `[command]${process.env.ComSpec} /D /S /C ""${cmdPath}" "my arg 1" "my arg 2""`
    )
    expect(output.trim()).toBe(
      'args[0]: "<quote>my arg 1<quote>"\r\n' +

@@ -572,6 +802,40 @@ describe('@actions/exec', () => {
    )
  })

+ it('execs .cmd from path (Windows)', async () => {
+   // this test validates whether a .cmd is resolved from the PATH when the extension is not specified
+   const cmd = 'print-args-cmd' // note, not print-args-cmd.cmd
+   const cmdPath = path.join(__dirname, 'scripts', `${cmd}.cmd`)
+   const args: string[] = ['my arg 1', 'my arg 2']
+   const outStream = new StringStream()
+   let output = ''
+   const options = {
+     outStream: <stream.Writable>outStream,
+     listeners: {
+       stdout: (data: Buffer) => {
+         output += data.toString()
+       }
+     }
+   }
+
+   const originalPath = process.env['Path']
+   try {
+     process.env['Path'] = `${originalPath};${path.dirname(cmdPath)}`
+     const exitCode = await exec.exec(`${cmd}`, args, options)
+     expect(exitCode).toBe(0)
+     expect(outStream.getContents().split(os.EOL)[0]).toBe(
+       `[command]${process.env.ComSpec} /D /S /C "${cmdPath} "my arg 1" "my arg 2""`
+     )
+     expect(output.trim()).toBe(
+       'args[0]: "<quote>my arg 1<quote>"\r\n' +
+         'args[1]: "<quote>my arg 2<quote>"'
+     )
+   } catch (err) {
+     process.env['Path'] = originalPath
+     throw err
+   }
+ })
+
  it('execs .cmd with arg quoting (Windows)', async () => {
    // this test validates .cmd quoting rules are applied, not the default libuv rules
    const cmdPath = path.join(
@@ -0,0 +1,12 @@
@echo off
setlocal
set index=0

:check_arg
set arg=%1
if not defined arg goto :eof
set "arg=%arg:"=<quote>%"
echo args[%index%]: "%arg%"
set /a index=%index%+1
shift
goto check_arg

@@ -0,0 +1,11 @@
#!/usr/bin/env bash

# store arguments in a special array
args=("$@")
# get number of elements
ELEMENTS=${#args[@]}

# echo each element
for (( i=0;i<$ELEMENTS;i++)); do
  echo "args[$i]: \"${args[${i}]}\""
done

@@ -0,0 +1,3 @@
@echo off
set /p var=
echo %var%

@@ -0,0 +1,3 @@
var fs = require('fs')
var data = fs.readFileSync(0, 'utf-8')
process.stdout.write(data)

@@ -0,0 +1,4 @@
#!/usr/bin/env bash

read var
echo $var
@@ -1,12 +1,13 @@
{
  "name": "@actions/exec",
- "version": "1.0.0",
+ "version": "1.0.4",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "@actions/io": {
-     "version": "1.0.0",
-     "dev": true
+     "version": "1.0.1",
+     "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz",
+     "integrity": "sha512-rhq+tfZukbtaus7xyUtwKfuiCRXd1hWSfmJNEpFgBQJ4woqPEpsBw04awicjwz9tyG2/MVhAEMfVn664Cri5zA=="
    }
  }
}
@@ -1,6 +1,6 @@
{
  "name": "@actions/exec",
- "version": "1.0.1",
+ "version": "1.0.4",
  "description": "Actions exec lib",
  "keywords": [
    "github",

@@ -10,6 +10,7 @@
  "homepage": "https://github.com/actions/toolkit/tree/master/packages/exec",
  "license": "MIT",
  "main": "lib/exec.js",
+ "types": "lib/exec.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"

@@ -26,13 +27,14 @@
    "directory": "packages/exec"
  },
  "scripts": {
+   "audit-moderate": "npm install && npm audit --audit-level=moderate",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
- "devDependencies": {
+ "dependencies": {
    "@actions/io": "^1.0.1"
  }
}
@@ -1,6 +1,8 @@
-import * as im from './interfaces'
+import {ExecOptions} from './interfaces'
import * as tr from './toolrunner'

+export {ExecOptions}
+
/**
 * Exec a command.
 * Output will be streamed to the live console.

@@ -14,7 +16,7 @@ import * as tr from './toolrunner'
export async function exec(
  commandLine: string,
  args?: string[],
- options?: im.ExecOptions
+ options?: ExecOptions
): Promise<number> {
  const commandArgs = tr.argStringToArray(commandLine)
  if (commandArgs.length === 0) {
@@ -30,6 +30,9 @@ export interface ExecOptions {
  /** optional. How long in ms to wait for STDIO streams to close after the exit event of the process before terminating. defaults to 10000 */
  delay?: number

+ /** optional. input to write to the process on STDIN. */
+ input?: Buffer
+
  /** optional. Listeners for output. Callback functions that will be called on these events */
  listeners?: {
    stdout?: (data: Buffer) => void
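The new `input` option is easiest to see in use. A minimal sketch (the command here is illustrative):

```js
const exec = require('@actions/exec');

// Write a Buffer to the child process's stdin; the stream is closed after the write.
await exec.exec('sort', [], {input: Buffer.from('b\na\nc\n')});
```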
@@ -1,8 +1,11 @@
import * as os from 'os'
import * as events from 'events'
import * as child from 'child_process'
+import * as path from 'path'
import * as stream from 'stream'
import * as im from './interfaces'
+import * as io from '@actions/io'
+import * as ioUtil from '@actions/io/lib/io-util'

/* eslint-disable @typescript-eslint/unbound-method */

@@ -392,6 +395,24 @@ export class ToolRunner extends events.EventEmitter {
   * @returns number
   */
  async exec(): Promise<number> {
+   // root the tool path if it is unrooted and contains relative pathing
+   if (
+     !ioUtil.isRooted(this.toolPath) &&
+     (this.toolPath.includes('/') ||
+       (IS_WINDOWS && this.toolPath.includes('\\')))
+   ) {
+     // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
+     this.toolPath = path.resolve(
+       process.cwd(),
+       this.options.cwd || process.cwd(),
+       this.toolPath
+     )
+   }
+
+   // if the tool is only a file name, then resolve it from the PATH
+   // otherwise verify it exists (add extension on Windows if necessary)
+   this.toolPath = await io.which(this.toolPath, true)
+
    return new Promise<number>((resolve, reject) => {
      this._debug(`exec tool: ${this.toolPath}`)
      this._debug('arguments:')

@@ -503,6 +524,14 @@ export class ToolRunner extends events.EventEmitter {
          resolve(exitCode)
        }
      })
+
+     if (this.options.input) {
+       if (!cp.stdin) {
+         throw new Error('child process missing stdin')
+       }
+
+       cp.stdin.end(this.options.input)
+     }
    })
  }
}

@@ -618,23 +647,15 @@ class ExecState extends events.EventEmitter {
    if (this.processExited) {
      if (this.processError) {
        error = new Error(
-         `There was an error when attempting to execute the process '${
-           this.toolPath
-         }'. This may indicate the process failed to start. Error: ${
-           this.processError
-         }`
+         `There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`
        )
      } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
        error = new Error(
-         `The process '${this.toolPath}' failed with exit code ${
-           this.processExitCode
-         }`
+         `The process '${this.toolPath}' failed with exit code ${this.processExitCode}`
        )
      } else if (this.processStderr && this.options.failOnStdErr) {
        error = new Error(
-         `The process '${
-           this.toolPath
-         }' failed because one or more lines were written to the STDERR stream`
+         `The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`
        )
      }
    }
@@ -4,7 +4,7 @@

## Usage

-Returns an Octokit client. See https://octokit.github.io/rest.js for the API.
+Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners). See https://octokit.github.io/rest.js for the API.

```js
const github = require('@actions/github');

@@ -14,7 +14,7 @@ async function run() {
    // This should be a token with access to your repository scoped in as a secret.
    // The YML workflow will need to set myToken with the GitHub Secret Token
    // myToken: ${{ secrets.GITHUB_TOKEN }}
-   // https://help.github.com/en/articles/virtual-environments-for-github-actions#github_token-secret
+   // https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
    const myToken = core.getInput('myToken');

    const octokit = new github.GitHub(myToken);

@@ -34,7 +34,7 @@ async function run() {
run();
```

-You can pass client options (except `auth`, which is handled by the token argument), as specified by [Octokit](https://octokit.github.io/rest.js/), as a second argument to the `GitHub` constructor.
+You can pass client options, as specified by [Octokit](https://octokit.github.io/rest.js/), as a second argument to the `GitHub` constructor.
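As an aside (not part of the diff; the option values below are made up), a sketch of such a call:

```js
const octokit = new github.GitHub(myToken, {
  userAgent: 'my-action/1.0.0',
  log: console
});
```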
You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API.

@@ -55,3 +55,20 @@ const newIssue = await octokit.issues.create({
  body: 'Hello Universe!'
});
```

+## Webhook payload typescript definitions
+
+The npm module `@octokit/webhooks` provides type definitions for the response payloads. You can cast the payload to these types for better type information.
+
+First, install the npm module `npm install @octokit/webhooks`
+
+Then, assert the type based on the eventName
+```ts
+import * as core from '@actions/core'
+import * as github from '@actions/github'
+import * as Webhooks from '@octokit/webhooks'
+if (github.context.eventName === 'push') {
+  const pushPayload = github.context.payload as Webhooks.WebhookPayloadPush
+  core.info(`The head commit is: ${pushPayload.head}`)
+}
+```
@@ -1,5 +1,23 @@
# @actions/github Releases

+### 2.1.1
+
+- [Use import {Octokit}](https://github.com/actions/toolkit/pull/332)
+- [Check proxy bypass before setting proxy agent](https://github.com/actions/toolkit/pull/320)
+
+### 2.1.0
+
+- [Octokit client follows proxy settings](https://github.com/actions/toolkit/pull/314)
+- [Fix issue number for pull request comment events](https://github.com/actions/toolkit/pull/311)
+
+### 2.0.1
+
+- [Add "types" to package.json](https://github.com/actions/toolkit/pull/221)
+
+### 2.0.0
+
+- Upgrade Octokit version to 4.x to include typescript types [#228](https://github.com/actions/toolkit/pull/228)
+
### 1.1.0

- Accept Octokit.Options in the GitHub constructor [#113](https://github.com/actions/toolkit/pull/113)
@@ -0,0 +1,173 @@
import * as http from 'http'
import proxy from 'proxy'
import {GitHub} from '../src/github'

describe('@actions/github', () => {
  const proxyUrl = 'http://127.0.0.1:8080'
  const originalProxyUrl = process.env['https_proxy']
  let proxyConnects: string[]
  let proxyServer: http.Server
  let first = true

  beforeAll(async () => {
    // Start proxy server
    proxyServer = proxy()
    await new Promise(resolve => {
      const port = Number(proxyUrl.split(':')[2])
      proxyServer.listen(port, () => resolve())
    })
    proxyServer.on('connect', req => {
      proxyConnects.push(req.url)
    })
  })

  beforeEach(() => {
    delete process.env['https_proxy']
    proxyConnects = []
  })

  afterAll(async () => {
    // Stop proxy server
    await new Promise(resolve => {
      proxyServer.once('close', () => resolve())
      proxyServer.close()
    })

    if (originalProxyUrl) {
      process.env['https_proxy'] = originalProxyUrl
    }
  })

  it('basic REST client', async () => {
    const token = getToken()
    if (!token) {
      return
    }

    const octokit = new GitHub(token)
    const branch = await octokit.repos.getBranch({
      owner: 'actions',
      repo: 'toolkit',
      branch: 'master'
    })
    expect(branch.data.name).toBe('master')
    expect(proxyConnects).toHaveLength(0)
  })

  it('basic REST client with custom auth', async () => {
    const token = getToken()
    if (!token) {
      return
    }

    // Valid token
    let octokit = new GitHub({auth: `token ${token}`})
    const branch = await octokit.repos.getBranch({
      owner: 'actions',
      repo: 'toolkit',
      branch: 'master'
    })
    expect(branch.data.name).toBe('master')
    expect(proxyConnects).toHaveLength(0)

    // Invalid token
    octokit = new GitHub({auth: `token asdf`})
    let failed = false
    try {
      await octokit.repos.getBranch({
        owner: 'actions',
        repo: 'toolkit',
        branch: 'master'
      })
    } catch (err) {
      failed = true
    }
    expect(failed).toBeTruthy()
  })

  it('basic REST client with proxy', async () => {
    const token = getToken()
    if (!token) {
      return
    }

    process.env['https_proxy'] = proxyUrl
    const octokit = new GitHub(token)
    const branch = await octokit.repos.getBranch({
      owner: 'actions',
      repo: 'toolkit',
      branch: 'master'
    })
    expect(branch.data.name).toBe('master')
    expect(proxyConnects).toEqual(['api.github.com:443'])
  })

  it('basic GraphQL client', async () => {
    const token = getToken()
    if (!token) {
      return
    }

    const octokit = new GitHub(token)
    const repository = await octokit.graphql(
      '{repository(owner:"actions", name:"toolkit"){name}}'
    )
    expect(repository).toEqual({repository: {name: 'toolkit'}})
    expect(proxyConnects).toHaveLength(0)
  })

  it('basic GraphQL client with custom auth', async () => {
    const token = getToken()
    if (!token) {
      return
    }

    // Valid token
    let octokit = new GitHub(token)
    const repository = await octokit.graphql(
      '{repository(owner:"actions", name:"toolkit"){name}}'
    )
    expect(repository).toEqual({repository: {name: 'toolkit'}})
    expect(proxyConnects).toHaveLength(0)

    // Invalid token
    octokit = new GitHub({auth: `token asdf`})
    let failed = false
    try {
      await octokit.graphql(
        '{repository(owner:"actions", name:"toolkit"){name}}'
      )
    } catch (err) {
      failed = true
    }
    expect(failed).toBeTruthy()
  })

  it('basic GraphQL client with proxy', async () => {
    const token = getToken()
    if (!token) {
      return
    }

    process.env['https_proxy'] = proxyUrl
    const octokit = new GitHub(token)
    const repository = await octokit.graphql(
      '{repository(owner:"actions", name:"toolkit"){name}}'
    )
    expect(repository).toEqual({repository: {name: 'toolkit'}})
    expect(proxyConnects).toEqual(['api.github.com:443'])
  })

  function getToken(): string {
    const token = process.env['GITHUB_TOKEN'] || ''
    if (!token && first) {
      /* eslint-disable-next-line no-console */
      console.warn(
        'Skipping GitHub tests. Set $GITHUB_TOKEN to run REST client and GraphQL client tests'
      )
      first = false
    }

    return token
  }
})
@@ -52,10 +52,11 @@ describe('@actions/context', () => {
    })
  })

- it('works with pullRequest payloads', () => {
+ it('works with pull_request payloads', () => {
    delete process.env.GITHUB_REPOSITORY
    context.payload = {
-     pullRequest: {number: 2},
+     // eslint-disable-next-line @typescript-eslint/camelcase
+     pull_request: {number: 2},
      repository: {owner: {login: 'user'}, name: 'test'}
    }
    expect(context.issue).toEqual({
@@ -0,0 +1,5 @@
declare module 'proxy' {
  import * as http from 'http'
  function internal(): http.Server
  export = internal
}
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "@actions/github",
- "version": "1.1.0",
+ "version": "2.1.1",
  "description": "Actions github lib",
  "keywords": [
    "github",

@@ -9,6 +9,7 @@
  "homepage": "https://github.com/actions/toolkit/tree/master/packages/github",
  "license": "MIT",
  "main": "lib/github.js",
+ "types": "lib/github.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"

@@ -25,18 +26,23 @@
    "directory": "packages/github"
  },
  "scripts": {
+   "audit-moderate": "npm install && npm audit --audit-level=moderate",
    "test": "jest",
    "build": "tsc",
+   "format": "prettier --write **/*.ts",
+   "format-check": "prettier --check **/*.ts",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
-   "@octokit/graphql": "^2.0.1",
-   "@octokit/rest": "^16.15.0"
+   "@actions/http-client": "^1.0.3",
+   "@octokit/graphql": "^4.3.1",
+   "@octokit/rest": "^16.43.1"
  },
  "devDependencies": {
-   "jest": "^24.7.1"
+   "jest": "^25.1.0",
+   "proxy": "^1.0.1"
  }
}
@@ -1,36 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
declare module '@octokit/graphql' {
  export interface GraphQlQueryResponse {
    data: {[key: string]: any} | null
    errors?: [
      {
        message: string
        path: [string]
        extensions: {[key: string]: any}
        locations: [
          {
            line: number
            column: number
          }
        ]
      }
    ]
  }

  export interface GraphQLError {
    message: string
    locations?: {line: number; column: number}[]
    path?: (string | number)[]
    extensions?: {
      [key: string]: any
    }
  }

  export interface Variables {
    [key: string]: any
  }

  export function defaults(
    options: any
  ): (query: string, variables?: Variables) => Promise<GraphQlQueryResponse>
}
@@ -27,11 +27,8 @@ export class Context {
        readFileSync(process.env.GITHUB_EVENT_PATH, {encoding: 'utf8'})
      )
    } else {
-     process.stdout.write(
-       `GITHUB_EVENT_PATH ${
-         process.env.GITHUB_EVENT_PATH
-       } does not exist${EOL}`
-     )
+     const path = process.env.GITHUB_EVENT_PATH
+     process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${EOL}`)
    }
  }
  this.eventName = process.env.GITHUB_EVENT_NAME as string

@@ -47,7 +44,7 @@ export class Context {
  return {
    ...this.repo,
-   number: (payload.issue || payload.pullRequest || payload).number
+   number: (payload.issue || payload.pull_request || payload).number
  }
}
@@ -1,7 +1,18 @@
// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts
-import {GraphQlQueryResponse, Variables, defaults} from '@octokit/graphql'
-import Octokit from '@octokit/rest'
+import {graphql} from '@octokit/graphql'
+
+// we need this type to set up a property on the GitHub object
+// that has token authorization
+// (it is not exported from octokit by default)
+import {
+  graphql as GraphQL,
+  RequestParameters as GraphQLRequestParameters
+} from '@octokit/graphql/dist-types/types'
+
+import {Octokit} from '@octokit/rest'
import * as Context from './context'
+import * as http from 'http'
+import * as httpClient from '@actions/http-client'

// We need this in order to extend Octokit
Octokit.prototype = new Octokit()

@@ -9,15 +20,115 @@ Octokit.prototype = new Octokit()
export const context = new Context.Context()

export class GitHub extends Octokit {
-  graphql: (
-    query: string,
-    variables?: Variables
-  ) => Promise<GraphQlQueryResponse>
+  graphql: GraphQL

-  constructor(token: string, opts: Omit<Octokit.Options, 'auth'> = {}) {
-    super({...opts, auth: `token ${token}`})
-    this.graphql = defaults({
-      headers: {authorization: `token ${token}`}
-    })
+  /* eslint-disable no-dupe-class-members */
+  // Disable no-dupe-class-members due to false positive for method overload
+  // https://github.com/typescript-eslint/typescript-eslint/issues/291
+
+  /**
+   * Sets up the REST client and GraphQL client with auth and proxy support.
+   * The parameter `token` or `opts.auth` must be supplied. The GraphQL client
+   * authorization is not setup when `opts.auth` is a function or object.
+   *
+   * @param token Auth token
+   * @param opts Octokit options
+   */
+  constructor(token: string, opts?: Omit<Octokit.Options, 'auth'>)
+  constructor(opts: Octokit.Options)
+  constructor(token: string | Octokit.Options, opts?: Octokit.Options) {
+    super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts)))
+
+    this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts))
  }

+  /**
+   * Disambiguates the constructor overload parameters
+   */
+  private static disambiguate(
+    token: string | Octokit.Options,
+    opts?: Octokit.Options
+  ): [string, Octokit.Options] {
+    return [
+      typeof token === 'string' ? token : '',
+      typeof token === 'object' ? token : opts || {}
+    ]
+  }
+
+  private static getOctokitOptions(
+    args: [string, Octokit.Options]
+  ): Octokit.Options {
+    const token = args[0]
+    const options = {...args[1]} // Shallow clone - don't mutate the object provided by the caller
+
+    // Auth
+    const auth = GitHub.getAuthString(token, options)
+    if (auth) {
+      options.auth = auth
+    }
+
+    // Proxy
+    const agent = GitHub.getProxyAgent(options)
+    if (agent) {
+      // Shallow clone - don't mutate the object provided by the caller
+      options.request = options.request ? {...options.request} : {}
+
+      // Set the agent
+      options.request.agent = agent
+    }
+
+    return options
+  }
+
+  private static getGraphQL(args: [string, Octokit.Options]): GraphQL {
+    const defaults: GraphQLRequestParameters = {}
+    const token = args[0]
+    const options = args[1]
+
+    // Authorization
+    const auth = this.getAuthString(token, options)
+    if (auth) {
+      defaults.headers = {
+        authorization: auth
+      }
+    }
+
+    // Proxy
+    const agent = GitHub.getProxyAgent(options)
+    if (agent) {
+      defaults.request = {agent}
+    }
+
+    return graphql.defaults(defaults)
+  }
+
+  private static getAuthString(
+    token: string,
+    options: Octokit.Options
+  ): string | undefined {
+    // Validate args
+    if (!token && !options.auth) {
+      throw new Error('Parameter token or opts.auth is required')
+    } else if (token && options.auth) {
+      throw new Error(
+        'Parameters token and opts.auth may not both be specified'
+      )
+    }
+
+    return typeof options.auth === 'string' ? options.auth : `token ${token}`
+  }
+
+  private static getProxyAgent(
+    options: Octokit.Options
+  ): http.Agent | undefined {
+    if (!options.request?.agent) {
+      const serverUrl = 'https://api.github.com'
+      if (httpClient.getProxyUrl(serverUrl)) {
+        const hc = new httpClient.HttpClient()
+        return hc.getAgent(serverUrl)
+      }
+    }
+
+    return undefined
+  }
}
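As a sketch of what the constructor overloads above allow (not part of the diff; both forms are equivalent ways to authenticate, and the token value is a placeholder):

```js
const {GitHub} = require('@actions/github');

// Token as the first argument...
const client1 = new GitHub('my-token');

// ...or an Octokit options object with an auth property.
const client2 = new GitHub({auth: 'token my-token'});
```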
@@ -2,7 +2,6 @@
  "extends": "../../tsconfig.json",
  "compilerOptions": {
-   "baseUrl": "./",
    "esModuleInterop": true,
    "outDir": "./lib",
    "rootDir": "./src"
  },
@@ -0,0 +1,113 @@
# `@actions/glob`

## Usage

### Basic

You can use this package to search for files matching glob patterns.

Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory.

```js
const glob = require('@actions/glob');

const patterns = ['**/tar.gz', '**/tar.bz']
const globber = await glob.create(patterns.join('\n'))
const files = await globber.glob()
```

### Opt out of following symbolic links

```js
const glob = require('@actions/glob');

const globber = await glob.create('**', {followSymbolicLinks: false})
const files = await globber.glob()
```

### Iterator

When dealing with a large amount of results, consider iterating the results as they are returned:

```js
const glob = require('@actions/glob');

const globber = await glob.create('**')
for await (const file of globber.globGenerator()) {
  console.log(file)
}
```

## Recommended action inputs

Glob follows symbolic links by default. Following is often appropriate unless deleting files.

Users may want to opt-out from following symbolic links for other reasons. For example,
excessive amounts of symbolic links can create the appearance of very, very many files
and slow the search.

When an action allows a user to specify input patterns, it is generally recommended to
allow users to opt-out from following symbolic links.

Snippet from `action.yml`:

```yaml
inputs:
  files:
    description: 'Files to print'
    required: true
  follow-symbolic-links:
    description: 'Indicates whether to follow symbolic links'
    default: true
```

And corresponding toolkit consumption:

```js
const core = require('@actions/core')
const glob = require('@actions/glob')

const globOptions = {
  followSymbolicLinks: core.getInput('follow-symbolic-links').toUpperCase() !== 'FALSE'
}
const globber = await glob.create(core.getInput('files'), globOptions)
for await (const file of globber.globGenerator()) {
  console.log(file)
}
```

## Patterns

### Glob behavior

Patterns `*`, `?`, `[...]`, `**` (globstar) are supported.

With the following behaviors:
- File names that begin with `.` may be included in the results
- Case insensitive on Windows
- Directory separator `/` and `\` both supported on Windows

### Tilde expansion

Supports basic tilde expansion, for current user HOME replacement only.

Example:
- `~` may expand to /Users/johndoe
- `~/foo` may expand to /Users/johndoe/foo

### Comments

Patterns that begin with `#` are treated as comments.

### Exclude patterns

Leading `!` changes the meaning of an include pattern to exclude.

Multiple leading `!` flips the meaning.
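For instance, the sketch below (the file layout is invented) excludes a subtree from an otherwise-broad match:

```js
const glob = require('@actions/glob');

// Match everything under the cwd except the node_modules tree.
const globber = await glob.create(['**', '!**/node_modules/**'].join('\n'))
const files = await globber.glob()
```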
### Escaping

Wrapping special characters in `[]` can be used to escape literal glob characters
in a file name. For example the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`.

On Linux/macOS `\` is also treated as an escape character.
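A short sketch of the escape in practice (file name taken from the example above):

```js
const glob = require('@actions/glob');

// Matches the literal file name 'hello[a-z]'.
const globber = await glob.create('hello[[]a-z]')
const files = await globber.glob()
```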
@@ -0,0 +1,5 @@
# @actions/glob Releases

### 0.1.0

- Initial release
@ -0,0 +1,825 @@
|
|||
import * as child from 'child_process'
import * as io from '../../io/src/io'
import * as os from 'os'
import * as path from 'path'
import {Globber, DefaultGlobber} from '../src/internal-globber'
import {GlobOptions} from '../src/internal-glob-options'
import {promises as fs} from 'fs'

const IS_WINDOWS = process.platform === 'win32'

/**
 * These tests focus on the ability of the globber to find files
 * and not on the pattern matching aspect
 */
describe('globber', () => {
  beforeAll(async () => {
    await io.rmRF(getTestTemp())
  })

  it('captures cwd', async () => {
    // Create the following layout:
    //   first-cwd
    //   first-cwd/the-correct-file
    //   second-cwd
    //   second-cwd/the-wrong-file
    const root = path.join(getTestTemp(), 'preserves-cwd')
    await fs.mkdir(path.join(root, 'first-cwd'), {recursive: true})
    await fs.writeFile(
      path.join(root, 'first-cwd', 'the-correct-file.txt'),
      'test file content'
    )
    await fs.mkdir(path.join(root, 'second-cwd'), {recursive: true})
    await fs.writeFile(
      path.join(root, 'second-cwd', 'the-wrong-file.txt'),
      'test file content'
    )

    const originalCwd = process.cwd()
    try {
      process.chdir(path.join(root, 'first-cwd'))
      const globber = await DefaultGlobber.create('*')
      process.chdir(path.join(root, 'second-cwd'))
      expect(globber.getSearchPaths()).toEqual([path.join(root, 'first-cwd')])
      const itemPaths = await globber.glob()
      expect(itemPaths).toEqual([
        path.join(root, 'first-cwd', 'the-correct-file.txt')
      ])
    } finally {
      process.chdir(originalCwd)
    }
  })

  it('defaults to followSymbolicLinks=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/folder-a
    //   <root>/folder-a/file
    //   <root>/symDir -> <root>/folder-a
    const root = path.join(
      getTestTemp(),
      'defaults-to-follow-symbolic-links-true'
    )
    await fs.mkdir(path.join(root, 'folder-a'), {recursive: true})
    await fs.writeFile(path.join(root, 'folder-a', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'folder-a'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(root, {})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'folder-a'),
      path.join(root, 'folder-a', 'file'),
      path.join(root, 'symDir'),
      path.join(root, 'symDir', 'file')
    ])
  })

  it('defaults to implicitDescendants=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/folder-a
    //   <root>/folder-a/file
    const root = path.join(
      getTestTemp(),
      'defaults-to-implicit-descendants-true'
    )
    await fs.mkdir(path.join(root, 'folder-a'), {recursive: true})
    await fs.writeFile(path.join(root, 'folder-a', 'file'), 'test file content')

    const itemPaths = await glob(root, {})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'folder-a'),
      path.join(root, 'folder-a', 'file')
    ])
  })

  it('defaults to omitBrokenSymbolicLinks=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/folder-a
    //   <root>/folder-a/file
    //   <root>/symDir -> <root>/no-such
    const root = path.join(
      getTestTemp(),
      'defaults-to-omit-broken-symbolic-links-true'
    )
    await fs.mkdir(path.join(root, 'folder-a'), {recursive: true})
    await fs.writeFile(path.join(root, 'folder-a', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'no-such'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(root, {})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'folder-a'),
      path.join(root, 'folder-a', 'file')
    ])
  })

  it('detects cycle when followSymbolicLinks=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/file
    //   <root>/symDir -> <root>
    const root = path.join(getTestTemp(), 'detects-cycle-when-follow-true')
    await fs.mkdir(root, {recursive: true})
    await fs.writeFile(path.join(root, 'file'), 'test file content')
    await createSymlinkDir(root, path.join(root, 'symDir'))

    const itemPaths = await glob(root, {followSymbolicLinks: true})
    expect(itemPaths).toEqual([root, path.join(root, 'file')])
  })

  it('detects deep cycle starting from middle when followSymbolicLinks=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/file-under-root
    //   <root>/folder-a
    //   <root>/folder-a/file-under-a
    //   <root>/folder-a/folder-b
    //   <root>/folder-a/folder-b/file-under-b
    //   <root>/folder-a/folder-b/folder-c
    //   <root>/folder-a/folder-b/folder-c/file-under-c
    //   <root>/folder-a/folder-b/folder-c/sym-folder -> <root>
    const root = path.join(
      getTestTemp(),
      'detects-deep-cycle-starting-from-middle-when-follow-true'
    )
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-c'), {
      recursive: true
    })
    await fs.writeFile(
      path.join(root, 'file-under-root'),
      'test file under root contents'
    )
    await fs.writeFile(
      path.join(root, 'folder-a', 'file-under-a'),
      'test file under a contents'
    )
    await fs.writeFile(
      path.join(root, 'folder-a', 'folder-b', 'file-under-b'),
      'test file under b contents'
    )
    await fs.writeFile(
      path.join(root, 'folder-a', 'folder-b', 'folder-c', 'file-under-c'),
      'test file under c contents'
    )
    await createSymlinkDir(
      root,
      path.join(root, 'folder-a', 'folder-b', 'folder-c', 'sym-folder')
    )
    await fs.stat(
      path.join(
        root,
        'folder-a',
        'folder-b',
        'folder-c',
        'sym-folder',
        'file-under-root'
      )
    )

    const itemPaths = await glob(path.join(root, 'folder-a', 'folder-b'), {
      followSymbolicLinks: true
    })
    expect(itemPaths).toEqual([
      path.join(root, 'folder-a', 'folder-b'),
      path.join(root, 'folder-a', 'folder-b', 'file-under-b'),
      path.join(root, 'folder-a', 'folder-b', 'folder-c'),
      path.join(root, 'folder-a', 'folder-b', 'folder-c', 'file-under-c'),
      path.join(root, 'folder-a', 'folder-b', 'folder-c', 'sym-folder'),
      path.join(
        root,
        'folder-a',
        'folder-b',
        'folder-c',
        'sym-folder',
        'file-under-root'
      ),
      path.join(
        root,
        'folder-a',
        'folder-b',
        'folder-c',
        'sym-folder',
        'folder-a'
      ),
      path.join(
        root,
        'folder-a',
        'folder-b',
        'folder-c',
        'sym-folder',
        'folder-a',
        'file-under-a'
      )
    ])
  })

  it('detects cycle starting from symlink when followSymbolicLinks=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/file
    //   <root>/symDir -> <root>
    const root: string = path.join(
      getTestTemp(),
      'detects-cycle-starting-from-symlink-when-follow-true'
    )
    await fs.mkdir(root, {recursive: true})
    await fs.writeFile(path.join(root, 'file'), 'test file content')
    await createSymlinkDir(root, path.join(root, 'symDir'))
    await fs.stat(path.join(root, 'symDir'))

    const itemPaths = await glob(path.join(root, 'symDir'), {
      followSymbolicLinks: true
    })
    expect(itemPaths).toEqual([
      path.join(root, 'symDir'),
      path.join(root, 'symDir', 'file')
    ])
  })

  it('does not follow symlink when followSymbolicLinks=false', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/realDir
    //   <root>/realDir/file
    //   <root>/symDir -> <root>/realDir
    const root = path.join(
      getTestTemp(),
      'does-not-follow-symlink-when-follow-false'
    )
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'realDir'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(root, {followSymbolicLinks: false})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'realDir'),
      path.join(root, 'realDir', 'file'),
      path.join(root, 'symDir')
    ])
  })

  it('does not follow symlink when search path is symlink and followSymbolicLinks=false', async () => {
    // Create the following layout:
    //   realDir
    //   realDir/file
    //   symDir -> realDir
    const root = path.join(
      getTestTemp(),
      'does-not-follow-symlink-when-search-path-is-symlink-and-follow-false'
    )
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'realDir'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(path.join(root, 'symDir'), {
      followSymbolicLinks: false
    })
    expect(itemPaths).toEqual([path.join(root, 'symDir')])
  })

  it('does not return broken symlink when follow-true and omit-true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/brokenSym -> <root>/noSuch
    //   <root>/realDir
    //   <root>/realDir/file
    //   <root>/symDir -> <root>/realDir
    const root = path.join(
      getTestTemp(),
      'does-not-return-broken-symlink-when-follow-true-and-omit-true'
    )
    await fs.mkdir(root, {recursive: true})
    await createSymlinkDir(
      path.join(root, 'noSuch'),
      path.join(root, 'brokenSym')
    )
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'realDir'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(root, {followSymbolicLinks: true})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'realDir'),
      path.join(root, 'realDir', 'file'),
      path.join(root, 'symDir'),
      path.join(root, 'symDir', 'file')
    ])
  })

  it('does not return broken symlink when search path is broken symlink and followSymbolicLinks=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/brokenSym -> <root>/noSuch
    const root = path.join(
      getTestTemp(),
      'does-not-return-broken-symlink-when-search-path-is-broken-symlink-and-follow-true'
    )
    await fs.mkdir(root, {recursive: true})
    const brokenSymPath = path.join(root, 'brokenSym')
    await createSymlinkDir(path.join(root, 'noSuch'), brokenSymPath)
    await fs.lstat(brokenSymPath)

    const itemPaths = await glob(brokenSymPath, {followSymbolicLinks: true})
    expect(itemPaths).toEqual([])
  })

  it('does not search paths that are not partial matches', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/realDir
    //   <root>/realDir/nested
    //   <root>/realDir/nested/file
    //   <root>/realDir2
    //   <root>/realDir2/nested2
    //   <root>/realDir2/nested2/symDir -> <root>/noSuch
    const root = path.join(
      getTestTemp(),
      'does-not-search-paths-that-are-not-partial-matches'
    )
    await fs.mkdir(path.join(root, 'realDir', 'nested'), {recursive: true})
    await fs.writeFile(
      path.join(root, 'realDir', 'nested', 'file'),
      'test file content'
    )
    await fs.mkdir(path.join(root, 'realDir2', 'nested2'), {recursive: true})
    await createSymlinkDir(
      path.join(root, 'noSuch'),
      path.join(root, 'realDir2', 'nested2', 'symDir')
    )

    const options: GlobOptions = {
      followSymbolicLinks: true,
      omitBrokenSymbolicLinks: false
    }

    // Should throw
    try {
      await glob(`${root}/*Dir*/*nested*/*`, options)
      throw new Error('should not reach here')
    } catch (err) {
      expect(err.message).toMatch(/broken symbolic link/i)
    }

    // Not partial match
    let itemPaths = await glob(`${root}/*Dir/*nested*/*`, options)
    expect(itemPaths).toEqual([path.join(root, 'realDir', 'nested', 'file')])

    // Not partial match
    itemPaths = await glob(`${root}/*Dir*/*nested/*`, options)
    expect(itemPaths).toEqual([path.join(root, 'realDir', 'nested', 'file')])
  })

  it('does not throw for broken symlinks that are not matches or partial matches when followSymbolicLinks=true and omitBrokenSymbolicLinks=false', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/realDir
    //   <root>/realDir/file
    //   <root>/symDir -> <root>/noSuch
    const root = path.join(
      getTestTemp(),
      'does-not-throw-for-broken-symlinks-that-are-not-matches-or-partial-matches-when-follow-true-and-omit-false'
    )
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(path.join(root, 'noSuch'), path.join(root, 'symDir'))

    const options: GlobOptions = {
      followSymbolicLinks: true,
      omitBrokenSymbolicLinks: false
    }

    // Match should throw
    try {
      await glob(`${root}/*`, options)
      throw new Error('should not reach here')
    } catch (err) {
      expect(err.message).toMatch(/broken symbolic link/i)
    }

    // Partial match should throw
    try {
      await glob(`${root}/*/*`, options)
      throw new Error('should not reach here')
    } catch (err) {
      expect(err.message).toMatch(/broken symbolic link/i)
    }

    // Not match or partial match
    const itemPaths = await glob(`${root}/*eal*/*`, options)
    expect(itemPaths).toEqual([path.join(root, 'realDir', 'file')])
  })

  it('follows symlink when follow-symbolic-links=true', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/realDir
    //   <root>/realDir/file
    //   <root>/symDir -> <root>/realDir
    const root = path.join(getTestTemp(), 'follows-symlink')
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'realDir'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(root, {followSymbolicLinks: true})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'realDir'),
      path.join(root, 'realDir', 'file'),
      path.join(root, 'symDir'),
      path.join(root, 'symDir', 'file')
    ])
  })

  it('follows symlink when search path is symlink and follow-symbolic-links=true', async () => {
    // Create the following layout:
    //   realDir
    //   realDir/file
    //   symDir -> realDir
    const root = path.join(
      getTestTemp(),
      'follows-symlink-when-search-path-is-symlink-and-follow-true'
    )
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'realDir'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(path.join(root, 'symDir'), {
      followSymbolicLinks: true
    })
    expect(itemPaths).toEqual([
      path.join(root, 'symDir'),
      path.join(root, 'symDir', 'file')
    ])
  })

  it('returns broken symlink when followSymbolicLinks=false', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/brokenSym -> <root>/noSuch
    //   <root>/realDir
    //   <root>/realDir/file
    //   <root>/symDir -> <root>/realDir
    const root = path.join(
      getTestTemp(),
      'returns-broken-symlink-when-follow-false'
    )
    await fs.mkdir(root, {recursive: true})
    await createSymlinkDir(
      path.join(root, 'noSuch'),
      path.join(root, 'brokenSym')
    )
    await fs.mkdir(path.join(root, 'realDir'), {recursive: true})
    await fs.writeFile(path.join(root, 'realDir', 'file'), 'test file content')
    await createSymlinkDir(
      path.join(root, 'realDir'),
      path.join(root, 'symDir')
    )

    const itemPaths = await glob(root, {followSymbolicLinks: false})
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'brokenSym'),
      path.join(root, 'realDir'),
      path.join(root, 'realDir', 'file'),
      path.join(root, 'symDir')
    ])
  })

  it('returns broken symlink when search path is broken symlink and followSymbolicLinks=false', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/brokenSym -> <root>/noSuch
    const root = path.join(
      getTestTemp(),
      'returns-broken-symlink-when-search-path-is-broken-symlink-and-follow-false'
    )
    await fs.mkdir(root, {recursive: true})
    const brokenSymPath = path.join(root, 'brokenSym')
    await createSymlinkDir(path.join(root, 'noSuch'), brokenSymPath)

    const itemPaths = await glob(brokenSymPath, {followSymbolicLinks: false})
    expect(itemPaths).toEqual([brokenSymPath])
  })

  it('returns depth first', async () => {
    // Create the following layout:
    //   <root>/a-file
    //   <root>/b-folder
    //   <root>/b-folder/a-file
    //   <root>/b-folder/b-folder
    //   <root>/b-folder/b-folder/file
    //   <root>/b-folder/c-file
    //   <root>/c-file
    const root = path.join(getTestTemp(), 'returns-depth-first')
    await fs.mkdir(path.join(root, 'b-folder', 'b-folder'), {recursive: true})
    await fs.writeFile(path.join(root, 'a-file'), 'test a-file content')
    await fs.writeFile(
      path.join(root, 'b-folder', 'a-file'),
      'test b-folder/a-file content'
    )
    await fs.writeFile(
      path.join(root, 'b-folder', 'b-folder', 'file'),
      'test b-folder/b-folder/file content'
    )
    await fs.writeFile(
      path.join(root, 'b-folder', 'c-file'),
      'test b-folder/c-file content'
    )
    await fs.writeFile(path.join(root, 'c-file'), 'test c-file content')

    const itemPaths = await glob(root)
    expect(itemPaths).toEqual([
      root,
      path.join(root, 'a-file'),
      path.join(root, 'b-folder'),
      path.join(root, 'b-folder', 'a-file'),
      path.join(root, 'b-folder', 'b-folder'),
      path.join(root, 'b-folder', 'b-folder', 'file'),
      path.join(root, 'b-folder', 'c-file'),
      path.join(root, 'c-file')
    ])
  })

  it('returns descendants', async () => {
    // Create the following layout:
    //   <root>/file-1
    //   <root>/dir-1
    //   <root>/dir-1/file-2
    //   <root>/dir-1/dir-2
    //   <root>/dir-1/dir-2/file-3
    const root = path.join(getTestTemp(), 'returns-descendants')
    await fs.mkdir(path.join(root, 'dir-1', 'dir-2'), {recursive: true})
    await fs.writeFile(path.join(root, 'file-1'), '')
    await fs.writeFile(path.join(root, 'dir-1', 'file-2'), '')
    await fs.writeFile(path.join(root, 'dir-1', 'dir-2', 'file-3'), '')

    // When pattern ends with `/**/`
    let pattern = `${root}${path.sep}**${path.sep}`
    expect(
      await glob(pattern, {
        implicitDescendants: false
      })
    ).toHaveLength(3) // sanity check
    expect(await glob(pattern)).toEqual([
      root,
      path.join(root, 'dir-1'),
      path.join(root, 'dir-1', 'dir-2'),
      path.join(root, 'dir-1', 'dir-2', 'file-3'),
      path.join(root, 'dir-1', 'file-2'),
      path.join(root, 'file-1')
    ])

    // When pattern ends with something other than `/**/`
    pattern = `${root}${path.sep}**${path.sep}dir-?`
    expect(
      await glob(pattern, {
        implicitDescendants: false
      })
    ).toHaveLength(2) // sanity check
    expect(await glob(pattern)).toEqual([
      path.join(root, 'dir-1'),
      path.join(root, 'dir-1', 'dir-2'),
      path.join(root, 'dir-1', 'dir-2', 'file-3'),
      path.join(root, 'dir-1', 'file-2')
    ])
  })

  it('returns directories only when trailing slash and implicit descendants false', async () => {
    // Create the following layout:
    //   <root>/file-1
    //   <root>/dir-1
    //   <root>/dir-1/file-2
    //   <root>/dir-1/dir-2
    //   <root>/dir-1/dir-2/file-3
    const root = path.join(
      getTestTemp(),
      'returns-directories-only-when-trailing-slash-and-implicit-descendants-false'
    )
    await fs.mkdir(path.join(root, 'dir-1', 'dir-2'), {recursive: true})
    await fs.writeFile(path.join(root, 'file-1'), '')
    await fs.writeFile(path.join(root, 'dir-1', 'file-2'), '')
    await fs.writeFile(path.join(root, 'dir-1', 'dir-2', 'file-3'), '')

    const pattern = `${root}${path.sep}**${path.sep}`
    expect(await glob(pattern)).toHaveLength(6) // sanity check
    expect(
      await glob(pattern, {
        implicitDescendants: false
      })
    ).toEqual([
      root,
      path.join(root, 'dir-1'),
      path.join(root, 'dir-1', 'dir-2')
    ])
  })

  it('returns empty when search path does not exist', async () => {
    const itemPaths = await glob(path.join(getTestTemp(), 'nosuch'))
    expect(itemPaths).toEqual([])
  })

  it('returns hidden files', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/.emptyFolder
    //   <root>/.file
    //   <root>/.folder
    //   <root>/.folder/file
    const root = path.join(getTestTemp(), 'returns-hidden-files')
    await createHiddenDirectory(path.join(root, '.emptyFolder'))
    await createHiddenDirectory(path.join(root, '.folder'))
    await createHiddenFile(path.join(root, '.file'), 'test .file content')
    await fs.writeFile(
      path.join(root, '.folder', 'file'),
      'test .folder/file content'
    )

    const itemPaths = await glob(root)
    expect(itemPaths).toEqual([
      root,
      path.join(root, '.emptyFolder'),
      path.join(root, '.file'),
      path.join(root, '.folder'),
      path.join(root, '.folder', 'file')
    ])
  })

  it('returns normalized paths', async () => {
    // Create the following layout:
    //   <root>/hello/world.txt
    const root: string = path.join(getTestTemp(), 'returns-normalized-paths')
    await fs.mkdir(path.join(root, 'hello'), {recursive: true})
    await fs.writeFile(path.join(root, 'hello', 'world.txt'), '')

    const itemPaths = await glob(
      `${root}${path.sep}${path.sep}${path.sep}hello`
    )
    expect(itemPaths).toEqual([
      path.join(root, 'hello'),
      path.join(root, 'hello', 'world.txt')
    ])
  })

  it('skips comments', async () => {
    const searchPaths = await getSearchPaths(
      `#aaa/*${os.EOL}/foo/*${os.EOL}#bbb/*${os.EOL}/bar/*`
    )
    const drive = IS_WINDOWS ? process.cwd().substr(0, 2) : ''
    expect(searchPaths).toEqual([
      IS_WINDOWS ? `${drive}\\foo` : '/foo',
      IS_WINDOWS ? `${drive}\\bar` : '/bar'
    ])
  })

  it('skips empty lines', async () => {
    const searchPaths = await getSearchPaths(
      `${os.EOL}${os.EOL}/foo/*${os.EOL}${os.EOL}/bar/*${os.EOL}/baz/**${os.EOL}`
    )
    const drive = IS_WINDOWS ? process.cwd().substr(0, 2) : ''
    expect(searchPaths).toEqual([
      IS_WINDOWS ? `${drive}\\foo` : '/foo',
      IS_WINDOWS ? `${drive}\\bar` : '/bar',
      IS_WINDOWS ? `${drive}\\baz` : '/baz'
    ])
  })

  it('throws when match broken symlink and followSymbolicLinks=true and omitBrokenSymbolicLinks=false', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/brokenSym -> <root>/noSuch
    const root = path.join(
      getTestTemp(),
      'throws-when-match-broken-symlink-and-follow-true-and-omit-false'
    )
    await fs.mkdir(root, {recursive: true})
    await createSymlinkDir(
      path.join(root, 'noSuch'),
      path.join(root, 'brokenSym')
    )

    try {
      await glob(root, {
        followSymbolicLinks: true,
        omitBrokenSymbolicLinks: false
      })
      throw new Error('Expected glob to throw')
    } catch (err) {
      expect(err.message).toMatch(/broken symbolic link/)
    }
  })

  it('throws when search path is broken symlink and followSymbolicLinks=true and omitBrokenSymbolicLinks=false', async () => {
    // Create the following layout:
    //   <root>
    //   <root>/brokenSym -> <root>/noSuch
    const root = path.join(
      getTestTemp(),
      'throws-when-search-path-is-broken-symlink-and-follow-true-and-omit-false'
    )
    await fs.mkdir(root, {recursive: true})
    const brokenSymPath = path.join(root, 'brokenSym')
    await createSymlinkDir(path.join(root, 'noSuch'), brokenSymPath)
    await fs.lstat(brokenSymPath)

    try {
      await glob(brokenSymPath, {
        followSymbolicLinks: true,
        omitBrokenSymbolicLinks: false
      })
      throw new Error('Expected glob to throw')
    } catch (err) {
      expect(err.message).toMatch(/broken symbolic link/)
    }
  })
})

async function createHiddenDirectory(dir: string): Promise<void> {
  if (!path.basename(dir).match(/^\./)) {
    throw new Error(`Expected dir '${dir}' to start with '.'.`)
  }

  await fs.mkdir(dir, {recursive: true})
  if (IS_WINDOWS) {
    const result = child.spawnSync('attrib.exe', ['+H', dir])
    if (result.status !== 0) {
      const message: string = (result.output || []).join(' ').trim()
      throw new Error(
        `Failed to set hidden attribute for directory '${dir}'. ${message}`
      )
    }
  }
}

async function createHiddenFile(file: string, content: string): Promise<void> {
  if (!path.basename(file).match(/^\./)) {
    throw new Error(`Expected file '${file}' to start with '.'.`)
  }

  await fs.mkdir(path.dirname(file), {recursive: true})
  await fs.writeFile(file, content)

  if (IS_WINDOWS) {
    const result = child.spawnSync('attrib.exe', ['+H', file])
    if (result.status !== 0) {
      const message: string = (result.output || []).join(' ').trim()
      throw new Error(
        `Failed to set hidden attribute for file '${file}'. ${message}`
      )
    }
  }
}

function getTestTemp(): string {
  return path.join(__dirname, '_temp', 'glob')
}

/**
 * Creates a symlink directory on OSX/Linux, and a junction point directory on Windows.
 * A symlink directory is not created on Windows since it requires an elevated context.
 */
async function createSymlinkDir(real: string, link: string): Promise<void> {
  if (IS_WINDOWS) {
    await fs.symlink(real, link, 'junction')
  } else {
    await fs.symlink(real, link)
  }
}

async function getSearchPaths(patterns: string): Promise<string[]> {
  const globber: Globber = await DefaultGlobber.create(patterns)
  return globber.getSearchPaths()
}

async function glob(
  patterns: string,
  options?: GlobOptions
): Promise<string[]> {
  const globber: Globber = await DefaultGlobber.create(patterns, options)
  return await globber.glob()
}

@ -0,0 +1,640 @@
import * as pathHelper from '../src/internal-path-helper'

const IS_WINDOWS = process.platform === 'win32'

describe('path-helper', () => {
  it('dirname interprets directory name from paths', () => {
    assertDirectoryName('', '.')
    assertDirectoryName('.', '.')
    assertDirectoryName('..', '.')
    assertDirectoryName('hello', '.')
    assertDirectoryName('hello/', '.')
    assertDirectoryName('hello/world', 'hello')

    if (IS_WINDOWS) {
      // Removes redundant slashes
      assertDirectoryName('C:\\\\hello\\\\\\world\\\\', 'C:\\hello')
      assertDirectoryName('C://hello///world//', 'C:\\hello')
      // Relative root:
      assertDirectoryName('\\hello\\\\world\\\\again\\\\', '\\hello\\world')
      assertDirectoryName('/hello///world//again//', '\\hello\\world')
      // UNC:
      assertDirectoryName('\\\\hello\\world\\again\\', '\\\\hello\\world')
      assertDirectoryName(
        '\\\\hello\\\\\\world\\\\again\\\\',
        '\\\\hello\\world'
      )
      assertDirectoryName(
        '\\\\\\hello\\\\\\world\\\\again\\\\',
        '\\\\hello\\world'
      )
      assertDirectoryName(
        '\\\\\\\\hello\\\\\\world\\\\again\\\\',
        '\\\\hello\\world'
      )
      assertDirectoryName('//hello///world//again//', '\\\\hello\\world')
      assertDirectoryName('///hello///world//again//', '\\\\hello\\world')
      assertDirectoryName('/////hello///world//again//', '\\\\hello\\world')
      // Relative:
      assertDirectoryName('hello\\world', 'hello')

      // Directory trimming
      assertDirectoryName('a:/hello', 'a:\\')
      assertDirectoryName('z:/hello', 'z:\\')
      assertDirectoryName('A:/hello', 'A:\\')
      assertDirectoryName('Z:/hello', 'Z:\\')
      assertDirectoryName('C:/', 'C:\\')
      assertDirectoryName('C:/hello', 'C:\\')
      assertDirectoryName('C:/hello/', 'C:\\')
      assertDirectoryName('C:/hello/world', 'C:\\hello')
      assertDirectoryName('C:/hello/world/', 'C:\\hello')
      assertDirectoryName('C:', 'C:')
      assertDirectoryName('C:hello', 'C:')
      assertDirectoryName('C:hello/', 'C:')
      assertDirectoryName('C:hello/world', 'C:hello')
      assertDirectoryName('C:hello/world/', 'C:hello')
      assertDirectoryName('/', '\\')
      assertDirectoryName('/hello', '\\')
      assertDirectoryName('/hello/', '\\')
      assertDirectoryName('/hello/world', '\\hello')
      assertDirectoryName('/hello/world/', '\\hello')
      assertDirectoryName('\\', '\\')
      assertDirectoryName('\\hello', '\\')
      assertDirectoryName('\\hello\\', '\\')
      assertDirectoryName('\\hello\\world', '\\hello')
      assertDirectoryName('\\hello\\world\\', '\\hello')
      assertDirectoryName('//hello', '\\\\hello')
      assertDirectoryName('//hello/', '\\\\hello')
      assertDirectoryName('//hello/world', '\\\\hello\\world')
      assertDirectoryName('//hello/world/', '\\\\hello\\world')
      assertDirectoryName('\\\\hello', '\\\\hello')
      assertDirectoryName('\\\\hello\\', '\\\\hello')
      assertDirectoryName('\\\\hello\\world', '\\\\hello\\world')
      assertDirectoryName('\\\\hello\\world\\', '\\\\hello\\world')
      assertDirectoryName('//hello/world/again', '\\\\hello\\world')
      assertDirectoryName('//hello/world/again/', '\\\\hello\\world')
      assertDirectoryName('hello/world/', 'hello')
      assertDirectoryName('hello/world/again', 'hello\\world')
      assertDirectoryName('../../hello', '..\\..')
    } else {
      // Should not convert slashes
      assertDirectoryName('/hello\\world', '/')
      assertDirectoryName('/hello\\world/', '/')
      assertDirectoryName('\\\\hello\\world\\again', '.')
      assertDirectoryName('\\\\hello\\world/', '.')
      assertDirectoryName('\\\\hello\\world/again', '\\\\hello\\world')
      assertDirectoryName('hello\\world', '.')
      assertDirectoryName('hello\\world/', '.')

      // Should remove redundant slashes (rooted paths; UNC format not special)
      assertDirectoryName('//hello', '/')
      assertDirectoryName('//hello/world', '/hello')
      assertDirectoryName('//hello/world/', '/hello')
      assertDirectoryName('//hello//world//', '/hello')
      assertDirectoryName('///hello////world///', '/hello')

      // Should remove redundant slashes (relative paths)
      assertDirectoryName('hello//world//again//', 'hello/world')
      assertDirectoryName('hello///world///again///', 'hello/world')

      // Directory trimming (Windows drive root format not special)
      assertDirectoryName('C:/', '.')
      assertDirectoryName('C:/hello', 'C:')
      assertDirectoryName('C:/hello/', 'C:')
      assertDirectoryName('C:/hello/world', 'C:/hello')
      assertDirectoryName('C:/hello/world/', 'C:/hello')
      assertDirectoryName('C:', '.')
      assertDirectoryName('C:hello', '.')
      assertDirectoryName('C:hello/', '.')
      assertDirectoryName('C:hello/world', 'C:hello')
      assertDirectoryName('C:hello/world/', 'C:hello')

      // Directory trimming (rooted paths)
      assertDirectoryName('/', '/')
      assertDirectoryName('/hello', '/')
      assertDirectoryName('/hello/', '/')
      assertDirectoryName('/hello/world', '/hello')
      assertDirectoryName('/hello/world/', '/hello')

      // Directory trimming (relative paths)
      assertDirectoryName('hello/world/', 'hello')
      assertDirectoryName('hello/world/again', 'hello/world')
      assertDirectoryName('../../hello', '../..')
    }
  })

  it('ensureAbsoluteRoot roots paths', () => {
    if (IS_WINDOWS) {
      const currentDrive = process.cwd().substr(0, 2)
      expect(currentDrive.match(/^[A-Z]:$/i)).toBeTruthy()
      const otherDrive = currentDrive.toUpperCase().startsWith('C')
        ? 'D:'
        : 'C:'

      // Preserves relative pathing
      assertEnsureAbsoluteRoot('C:/foo', '.', `C:/foo\\.`)
      assertEnsureAbsoluteRoot('C:/foo/..', 'bar', `C:/foo/..\\bar`)
      assertEnsureAbsoluteRoot('C:/foo', 'bar/../baz', `C:/foo\\bar/../baz`)

      // Already rooted - drive root
      assertEnsureAbsoluteRoot('D:\\', 'C:/', 'C:/')
      assertEnsureAbsoluteRoot('D:\\', 'a:/hello', 'a:/hello')
      assertEnsureAbsoluteRoot('D:\\', 'C:\\', 'C:\\')
      assertEnsureAbsoluteRoot('D:\\', 'C:\\hello', 'C:\\hello')

      // Already rooted - relative current drive root
      expect(process.cwd().length).toBeGreaterThan(3) // sanity check not drive root
      assertEnsureAbsoluteRoot(`${otherDrive}\\`, currentDrive, process.cwd())
      assertEnsureAbsoluteRoot(
        `${otherDrive}\\`,
        `${currentDrive}hello`,
        `${process.cwd()}\\hello`
      )
      assertEnsureAbsoluteRoot(
        `${otherDrive}\\`,
        `${currentDrive}hello/world`,
        `${process.cwd()}\\hello/world`
      )
      assertEnsureAbsoluteRoot(
        `${otherDrive}\\`,
        `${currentDrive}hello\\world`,
        `${process.cwd()}\\hello\\world`
      )

      // Already rooted - relative other drive root
      assertEnsureAbsoluteRoot(
        `${currentDrive}\\`,
        otherDrive,
        `${otherDrive}\\`
      )
      assertEnsureAbsoluteRoot(
        `${currentDrive}\\`,
        `${otherDrive}hello`,
        `${otherDrive}\\hello`
      )
      assertEnsureAbsoluteRoot(
        `${currentDrive}\\`,
        `${otherDrive}hello/world`,
        `${otherDrive}\\hello/world`
      )
      assertEnsureAbsoluteRoot(
        `${currentDrive}\\`,
        `${otherDrive}hello\\world`,
        `${otherDrive}\\hello\\world`
      )

      // Already rooted - current drive root
      assertEnsureAbsoluteRoot(`${otherDrive}\\`, '/', `${currentDrive}\\`)
      assertEnsureAbsoluteRoot(
        `${otherDrive}\\`,
        '/hello',
        `${currentDrive}\\hello`
      )
      assertEnsureAbsoluteRoot(`${otherDrive}\\`, '\\', `${currentDrive}\\`)
      assertEnsureAbsoluteRoot(
        `${otherDrive}\\`,
        '\\hello',
        `${currentDrive}\\hello`
      )

      // Already rooted - UNC
      assertEnsureAbsoluteRoot('D:\\', '//machine/share', '//machine/share')
      assertEnsureAbsoluteRoot(
        'D:\\',
        '\\\\machine\\share',
        '\\\\machine\\share'
      )

      // Relative
      assertEnsureAbsoluteRoot('D:/', 'hello', 'D:/hello')
      assertEnsureAbsoluteRoot('D:/', 'hello/world', 'D:/hello/world')
      assertEnsureAbsoluteRoot('D:\\', 'hello', 'D:\\hello')
      assertEnsureAbsoluteRoot('D:\\', 'hello\\world', 'D:\\hello\\world')
      assertEnsureAbsoluteRoot('D:/root', 'hello', 'D:/root\\hello')
      assertEnsureAbsoluteRoot('D:/root', 'hello/world', 'D:/root\\hello/world')
      assertEnsureAbsoluteRoot('D:\\root', 'hello', 'D:\\root\\hello')
      assertEnsureAbsoluteRoot(
        'D:\\root',
        'hello\\world',
        'D:\\root\\hello\\world'
      )
      assertEnsureAbsoluteRoot('D:/root/', 'hello', 'D:/root/hello')
      assertEnsureAbsoluteRoot('D:/root/', 'hello/world', 'D:/root/hello/world')
      assertEnsureAbsoluteRoot('D:\\root\\', 'hello', 'D:\\root\\hello')
      assertEnsureAbsoluteRoot(
        'D:\\root\\',
        'hello\\world',
        'D:\\root\\hello\\world'
      )
    } else {
      // Preserves relative pathing
      assertEnsureAbsoluteRoot('/foo', '.', `/foo/.`)
      assertEnsureAbsoluteRoot('/foo/..', 'bar', `/foo/../bar`)
      assertEnsureAbsoluteRoot('/foo', 'bar/../baz', `/foo/bar/../baz`)

      // Already rooted
      assertEnsureAbsoluteRoot('/root', '/', '/')
      assertEnsureAbsoluteRoot('/root', '/hello', '/hello')
      assertEnsureAbsoluteRoot('/root', '/hello/world', '/hello/world')

      // Not already rooted - Windows style drive root
      assertEnsureAbsoluteRoot('/root', 'C:/', '/root/C:/')
      assertEnsureAbsoluteRoot('/root', 'C:/hello', '/root/C:/hello')
      assertEnsureAbsoluteRoot('/root', 'C:\\', '/root/C:\\')

      // Not already rooted - Windows style relative drive root
      assertEnsureAbsoluteRoot('/root', 'C:', '/root/C:')
      assertEnsureAbsoluteRoot('/root', 'C:hello/world', '/root/C:hello/world')

      // Not already rooted - Windows style current drive root
      assertEnsureAbsoluteRoot('/root', '\\', '/root/\\')
      assertEnsureAbsoluteRoot(
        '/root',
        '\\hello\\world',
        '/root/\\hello\\world'
      )

      // Not already rooted - Windows style UNC
      assertEnsureAbsoluteRoot(
        '/root',
        '\\\\machine\\share',
        '/root/\\\\machine\\share'
      )

      // Not already rooted - relative
      assertEnsureAbsoluteRoot('/', 'hello', '/hello')
      assertEnsureAbsoluteRoot('/', 'hello/world', '/hello/world')
      assertEnsureAbsoluteRoot('/', 'hello\\world', '/hello\\world')
      assertEnsureAbsoluteRoot('/root', 'hello', '/root/hello')
      assertEnsureAbsoluteRoot('/root', 'hello/world', '/root/hello/world')
      assertEnsureAbsoluteRoot('/root', 'hello\\world', '/root/hello\\world')
      assertEnsureAbsoluteRoot('/root/', 'hello', '/root/hello')
      assertEnsureAbsoluteRoot('/root/', 'hello/world', '/root/hello/world')
      assertEnsureAbsoluteRoot('/root/', 'hello\\world', '/root/hello\\world')
      assertEnsureAbsoluteRoot('/root\\', 'hello', '/root\\/hello')
      assertEnsureAbsoluteRoot('/root\\', 'hello/world', '/root\\/hello/world')
      assertEnsureAbsoluteRoot(
        '/root\\',
        'hello\\world',
        '/root\\/hello\\world'
      )
    }
  })

  it('hasAbsoluteRoot detects absolute root', () => {
    if (IS_WINDOWS) {
      // Drive root
      assertHasAbsoluteRoot('C:/', true)
      assertHasAbsoluteRoot('a:/hello', true)
      assertHasAbsoluteRoot('c:/hello', true)
      assertHasAbsoluteRoot('z:/hello', true)
      assertHasAbsoluteRoot('A:/hello', true)
      assertHasAbsoluteRoot('C:/hello', true)
      assertHasAbsoluteRoot('Z:/hello', true)
      assertHasAbsoluteRoot('C:\\', true)
      assertHasAbsoluteRoot('C:\\hello', true)

      // Relative drive root
      assertHasAbsoluteRoot('C:', false)
      assertHasAbsoluteRoot('C:hello', false)
      assertHasAbsoluteRoot('C:hello/world', false)
      assertHasAbsoluteRoot('C:hello\\world', false)

      // Current drive root
      assertHasAbsoluteRoot('/', false)
      assertHasAbsoluteRoot('/hello', false)
      assertHasAbsoluteRoot('/hello/world', false)
      assertHasAbsoluteRoot('\\', false)
      assertHasAbsoluteRoot('\\hello', false)
      assertHasAbsoluteRoot('\\hello\\world', false)

      // UNC
      assertHasAbsoluteRoot('//machine/share', true)
      assertHasAbsoluteRoot('//machine/share/', true)
      assertHasAbsoluteRoot('//machine/share/hello', true)
      assertHasAbsoluteRoot('\\\\machine\\share', true)
      assertHasAbsoluteRoot('\\\\machine\\share\\', true)
      assertHasAbsoluteRoot('\\\\machine\\share\\hello', true)

      // Relative
      assertHasAbsoluteRoot('hello', false)
      assertHasAbsoluteRoot('hello/world', false)
      assertHasAbsoluteRoot('hello\\world', false)
    } else {
      // Root
      assertHasAbsoluteRoot('/', true)
      assertHasAbsoluteRoot('/hello', true)
      assertHasAbsoluteRoot('/hello/world', true)

      // Windows style drive root - false on OSX/Linux
      assertHasAbsoluteRoot('C:/', false)
      assertHasAbsoluteRoot('a:/hello', false)
      assertHasAbsoluteRoot('c:/hello', false)
      assertHasAbsoluteRoot('z:/hello', false)
      assertHasAbsoluteRoot('A:/hello', false)
      assertHasAbsoluteRoot('C:/hello', false)
      assertHasAbsoluteRoot('Z:/hello', false)
      assertHasAbsoluteRoot('C:\\', false)
      assertHasAbsoluteRoot('C:\\hello', false)

      // Windows style relative drive root - false on OSX/Linux
      assertHasAbsoluteRoot('C:', false)
      assertHasAbsoluteRoot('C:hello', false)
      assertHasAbsoluteRoot('C:hello/world', false)
      assertHasAbsoluteRoot('C:hello\\world', false)

      // Windows style current drive root - false on OSX/Linux
      assertHasAbsoluteRoot('\\', false)
      assertHasAbsoluteRoot('\\hello', false)
      assertHasAbsoluteRoot('\\hello\\world', false)

      // Windows style UNC - false on OSX/Linux
      assertHasAbsoluteRoot('\\\\machine\\share', false)
      assertHasAbsoluteRoot('\\\\machine\\share\\', false)
      assertHasAbsoluteRoot('\\\\machine\\share\\hello', false)

      // Relative
      assertHasAbsoluteRoot('hello', false)
      assertHasAbsoluteRoot('hello/world', false)
      assertHasAbsoluteRoot('hello\\world', false)
    }
  })

  it('hasRoot detects root', () => {
    if (IS_WINDOWS) {
      // Drive root
      assertHasRoot('C:/', true)
      assertHasRoot('a:/hello', true)
      assertHasRoot('c:/hello', true)
      assertHasRoot('z:/hello', true)
      assertHasRoot('A:/hello', true)
      assertHasRoot('C:/hello', true)
      assertHasRoot('Z:/hello', true)
      assertHasRoot('C:\\', true)
      assertHasRoot('C:\\hello', true)

      // Relative drive root
      assertHasRoot('C:', true)
      assertHasRoot('C:hello', true)
      assertHasRoot('C:hello/world', true)
      assertHasRoot('C:hello\\world', true)

      // Current drive root
      assertHasRoot('/', true)
      assertHasRoot('/hello', true)
      assertHasRoot('/hello/world', true)
      assertHasRoot('\\', true)
      assertHasRoot('\\hello', true)
      assertHasRoot('\\hello\\world', true)

      // UNC
      assertHasRoot('//machine/share', true)
      assertHasRoot('//machine/share/', true)
      assertHasRoot('//machine/share/hello', true)
      assertHasRoot('\\\\machine\\share', true)
      assertHasRoot('\\\\machine\\share\\', true)
      assertHasRoot('\\\\machine\\share\\hello', true)

      // Relative
      assertHasRoot('hello', false)
      assertHasRoot('hello/world', false)
      assertHasRoot('hello\\world', false)
    } else {
      // Root
      assertHasRoot('/', true)
      assertHasRoot('/hello', true)
      assertHasRoot('/hello/world', true)

      // Windows style drive root - false on OSX/Linux
      assertHasRoot('C:/', false)
      assertHasRoot('a:/hello', false)
      assertHasRoot('c:/hello', false)
      assertHasRoot('z:/hello', false)
      assertHasRoot('A:/hello', false)
      assertHasRoot('C:/hello', false)
      assertHasRoot('Z:/hello', false)
      assertHasRoot('C:\\', false)
      assertHasRoot('C:\\hello', false)

      // Windows style relative drive root - false on OSX/Linux
      assertHasRoot('C:', false)
      assertHasRoot('C:hello', false)
      assertHasRoot('C:hello/world', false)
      assertHasRoot('C:hello\\world', false)

      // Windows style current drive root - false on OSX/Linux
      assertHasRoot('\\', false)
      assertHasRoot('\\hello', false)
      assertHasRoot('\\hello\\world', false)

      // Windows style UNC - false on OSX/Linux
      assertHasRoot('\\\\machine\\share', false)
      assertHasRoot('\\\\machine\\share\\', false)
      assertHasRoot('\\\\machine\\share\\hello', false)

      // Relative
      assertHasRoot('hello', false)
      assertHasRoot('hello/world', false)
      assertHasRoot('hello\\world', false)
    }
  })

  it('normalizeSeparators normalizes slashes', () => {
    if (IS_WINDOWS) {
      // Drive-rooted
      assertNormalizeSeparators('C:/', 'C:\\')
      assertNormalizeSeparators('C:/hello', 'C:\\hello')
      assertNormalizeSeparators('C:/hello/', 'C:\\hello\\')
      assertNormalizeSeparators('C:\\', 'C:\\')
      assertNormalizeSeparators('C:\\hello', 'C:\\hello')
      assertNormalizeSeparators('C:', 'C:')
      assertNormalizeSeparators('C:hello', 'C:hello')
      assertNormalizeSeparators('C:hello/world', 'C:hello\\world')
      assertNormalizeSeparators('C:hello\\world', 'C:hello\\world')
      assertNormalizeSeparators('/', '\\')
      assertNormalizeSeparators('/hello', '\\hello')
      assertNormalizeSeparators('/hello/world', '\\hello\\world')
      assertNormalizeSeparators('/hello//world', '\\hello\\world')
      assertNormalizeSeparators('\\', '\\')
      assertNormalizeSeparators('\\hello', '\\hello')
      assertNormalizeSeparators('\\hello\\', '\\hello\\')
      assertNormalizeSeparators('\\hello\\world', '\\hello\\world')
      assertNormalizeSeparators('\\hello\\\\world', '\\hello\\world')

      // UNC
      assertNormalizeSeparators('//machine/share', '\\\\machine\\share')
      assertNormalizeSeparators('//machine/share/', '\\\\machine\\share\\')
      assertNormalizeSeparators(
        '//machine/share/hello',
        '\\\\machine\\share\\hello'
      )
      assertNormalizeSeparators('///machine/share', '\\\\machine\\share')
      assertNormalizeSeparators('\\\\machine\\share', '\\\\machine\\share')
      assertNormalizeSeparators('\\\\machine\\share\\', '\\\\machine\\share\\')
      assertNormalizeSeparators(
        '\\\\machine\\share\\hello',
        '\\\\machine\\share\\hello'
      )
      assertNormalizeSeparators('\\\\\\machine\\share', '\\\\machine\\share')

      // Relative
      assertNormalizeSeparators('hello', 'hello')
      assertNormalizeSeparators('hello/world', 'hello\\world')
      assertNormalizeSeparators('hello//world', 'hello\\world')
      assertNormalizeSeparators('hello\\world', 'hello\\world')
      assertNormalizeSeparators('hello\\\\world', 'hello\\world')
    } else {
      // Rooted
      assertNormalizeSeparators('/', '/')
      assertNormalizeSeparators('/hello', '/hello')
      assertNormalizeSeparators('/hello/world', '/hello/world')
      assertNormalizeSeparators('//hello/world/', '/hello/world/')

      // Backslash not converted
      assertNormalizeSeparators('C:\\', 'C:\\')
      assertNormalizeSeparators('C:\\\\hello\\\\', 'C:\\\\hello\\\\')
      assertNormalizeSeparators('\\', '\\')
      assertNormalizeSeparators('\\hello', '\\hello')
      assertNormalizeSeparators('\\hello\\world', '\\hello\\world')
      assertNormalizeSeparators('hello\\world', 'hello\\world')

      // UNC not converted
      assertNormalizeSeparators('\\\\machine\\share', '\\\\machine\\share')

      // UNC not preserved
      assertNormalizeSeparators('//machine/share', '/machine/share')

      // Relative
      assertNormalizeSeparators('hello', 'hello')
      assertNormalizeSeparators('hello/////world', 'hello/world')
    }
  })

  it('safeTrimTrailingSeparator safely trims trailing separator', () => {
    assertSafeTrimTrailingSeparator('', '')

    if (IS_WINDOWS) {
      // Removes redundant slashes
      assertSafeTrimTrailingSeparator(
        'C:\\\\hello\\\\\\world\\\\',
        'C:\\hello\\world'
      )
      assertSafeTrimTrailingSeparator('C://hello///world//', 'C:\\hello\\world')
      // Relative root:
      assertSafeTrimTrailingSeparator(
        '\\hello\\\\world\\\\again\\\\',
        '\\hello\\world\\again'
      )
      assertSafeTrimTrailingSeparator(
        '/hello///world//again//',
        '\\hello\\world\\again'
      )
      // UNC:
      assertSafeTrimTrailingSeparator('\\\\hello\\world\\', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator(
        '\\\\hello\\world\\\\',
        '\\\\hello\\world'
      )
      assertSafeTrimTrailingSeparator(
        '\\\\hello\\\\\\world\\\\again\\',
        '\\\\hello\\world\\again'
      )
      assertSafeTrimTrailingSeparator('//hello/world/', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator('//hello/world//', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator(
        '//hello//world//again/',
        '\\\\hello\\world\\again'
      )
      // Relative:
      assertSafeTrimTrailingSeparator('hello\\world\\', 'hello\\world')

      // Slash trimming
      assertSafeTrimTrailingSeparator('a:/hello/', 'a:\\hello')
      assertSafeTrimTrailingSeparator('z:/hello', 'z:\\hello')
      assertSafeTrimTrailingSeparator('C:/', 'C:\\')
      assertSafeTrimTrailingSeparator('C:\\', 'C:\\')
      assertSafeTrimTrailingSeparator('C:/hello/world', 'C:\\hello\\world')
      assertSafeTrimTrailingSeparator('C:/hello/world/', 'C:\\hello\\world')
      assertSafeTrimTrailingSeparator('C:', 'C:')
      assertSafeTrimTrailingSeparator('C:hello/', 'C:hello')
      assertSafeTrimTrailingSeparator('/', '\\')
      assertSafeTrimTrailingSeparator('/hello/', '\\hello')
      assertSafeTrimTrailingSeparator('\\', '\\')
      assertSafeTrimTrailingSeparator('\\hello\\', '\\hello')
      assertSafeTrimTrailingSeparator('//hello/', '\\\\hello')
      assertSafeTrimTrailingSeparator('//hello/world', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator('//hello/world/', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator('\\\\hello', '\\\\hello')
      assertSafeTrimTrailingSeparator('\\\\hello\\', '\\\\hello')
      assertSafeTrimTrailingSeparator('\\\\hello\\world', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator('\\\\hello\\world\\', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator('hello/world/', 'hello\\world')
      assertSafeTrimTrailingSeparator('hello/', 'hello')
      assertSafeTrimTrailingSeparator('../../', '..\\..')
    } else {
      // Should not convert slashes
      assertSafeTrimTrailingSeparator('/hello\\world', '/hello\\world')
      assertSafeTrimTrailingSeparator('/hello\\world/', '/hello\\world')
      assertSafeTrimTrailingSeparator('\\\\hello\\world/', '\\\\hello\\world')
      assertSafeTrimTrailingSeparator('hello\\world/', 'hello\\world')

      // Should remove redundant slashes (rooted paths; UNC format not special)
      assertSafeTrimTrailingSeparator('//hello', '/hello')
      assertSafeTrimTrailingSeparator('//hello/world', '/hello/world')
      assertSafeTrimTrailingSeparator('//hello/world/', '/hello/world')
      assertSafeTrimTrailingSeparator('//hello//world//', '/hello/world')
      assertSafeTrimTrailingSeparator('///hello////world///', '/hello/world')

      // Should remove redundant slashes (relative paths)
      assertSafeTrimTrailingSeparator('hello//world//', 'hello/world')
      assertSafeTrimTrailingSeparator('hello///world///', 'hello/world')

      // Slash trimming (Windows drive root format not special)
      assertSafeTrimTrailingSeparator('C:/', 'C:')
      assertSafeTrimTrailingSeparator('C:/hello', 'C:/hello')
      assertSafeTrimTrailingSeparator('C:/hello/', 'C:/hello')
      assertSafeTrimTrailingSeparator('C:hello/', 'C:hello')

      // Slash trimming (rooted paths)
      assertSafeTrimTrailingSeparator('/', '/')
      assertSafeTrimTrailingSeparator('/hello', '/hello')
      assertSafeTrimTrailingSeparator('/hello/', '/hello')
      assertSafeTrimTrailingSeparator('/hello/world/', '/hello/world')

      // Slash trimming (relative paths)
      assertSafeTrimTrailingSeparator('hello/world/', 'hello/world')
      assertSafeTrimTrailingSeparator('../../', '../..')
    }
  })
})

function assertDirectoryName(itemPath: string, expected: string): void {
  expect(pathHelper.dirname(itemPath)).toBe(expected)
}

function assertEnsureAbsoluteRoot(
  root: string,
  itemPath: string,
  expected: string
): void {
  expect(pathHelper.ensureAbsoluteRoot(root, itemPath)).toBe(expected)
}

function assertHasAbsoluteRoot(itemPath: string, expected: boolean): void {
  expect(pathHelper.hasAbsoluteRoot(itemPath)).toBe(expected)
}

function assertHasRoot(itemPath: string, expected: boolean): void {
  expect(pathHelper.hasRoot(itemPath)).toBe(expected)
}

function assertNormalizeSeparators(itemPath: string, expected: string): void {
  expect(pathHelper.normalizeSeparators(itemPath)).toBe(expected)
}

function assertSafeTrimTrailingSeparator(
  itemPath: string,
  expected: string
): void {
  expect(pathHelper.safeTrimTrailingSeparator(itemPath)).toBe(expected)
}

@ -0,0 +1,92 @@
import * as path from 'path'
import {Path} from '../src/internal-path'

const IS_WINDOWS = process.platform === 'win32'

describe('path', () => {
  it('constructs from rooted path', () => {
    assertPath(`/`, `${path.sep}`, [path.sep])
    assertPath(`/foo`, `${path.sep}foo`, [path.sep, 'foo'])
    if (IS_WINDOWS) {
      assertPath(`C:\\foo`, `C:\\foo`, ['C:\\', 'foo'])
      assertPath(`C:foo`, `C:foo`, ['C:', 'foo'])
      assertPath(`\\\\foo\\bar\\baz`, `\\\\foo\\bar\\baz`, [
        '\\\\foo\\bar',
        'baz'
      ])
    }
  })

  it('constructs from rooted segments', () => {
    assertPath([`/`], `${path.sep}`, [path.sep])
    assertPath([`/`, `foo`], `${path.sep}foo`, [path.sep, 'foo'])
    if (IS_WINDOWS) {
      assertPath([`C:\\`, `foo`], `C:\\foo`, ['C:\\', 'foo'])
      assertPath([`C:`, `foo`], `C:foo`, ['C:', 'foo'])
      assertPath([`\\\\foo\\bar`, `baz`], `\\\\foo\\bar\\baz`, [
        '\\\\foo\\bar',
        'baz'
      ])
    }
  })

  it('constructs from relative path', () => {
    assertPath(`foo`, `foo`, ['foo'])
    assertPath(`foo/bar`, `foo${path.sep}bar`, ['foo', 'bar'])
  })

  it('constructs from relative segments', () => {
    assertPath([`foo`], `foo`, ['foo'])
    assertPath([`foo`, `bar`], `foo${path.sep}bar`, ['foo', 'bar'])
  })

  it('normalizes slashes', () => {
    assertPath(
      `/foo///bar${path.sep}${path.sep}${path.sep}baz`,
      `${path.sep}foo${path.sep}bar${path.sep}baz`,
      [path.sep, 'foo', 'bar', 'baz']
    )
  })

  it('preserves relative pathing', () => {
    assertPath(
      '/foo/../bar/./baz',
      `${path.sep}foo${path.sep}..${path.sep}bar${path.sep}.${path.sep}baz`,
      [path.sep, 'foo', '..', 'bar', '.', 'baz']
    )
  })

  it('trims unnecessary trailing slash', () => {
    assertPath('/', path.sep, [path.sep])
    assertPath('/foo/', `${path.sep}foo`, [path.sep, 'foo'])
    assertPath('foo/', 'foo', ['foo'])
    assertPath('foo/bar/', `foo${path.sep}bar`, ['foo', 'bar'])
    if (IS_WINDOWS) {
      assertPath('\\', '\\', ['\\'])
      assertPath('C:\\', 'C:\\', ['C:\\'])
      assertPath('C:\\foo\\', 'C:\\foo', ['C:\\', 'foo'])
      assertPath('C:foo\\', 'C:foo', ['C:', 'foo'])
      assertPath('\\\\computer\\share\\', '\\\\computer\\share', [
        '\\\\computer\\share'
      ])
      assertPath('\\\\computer\\share\\foo', '\\\\computer\\share\\foo', [
        '\\\\computer\\share',
        'foo'
      ])
      assertPath('\\\\computer\\share\\foo\\', '\\\\computer\\share\\foo', [
        '\\\\computer\\share',
        'foo'
      ])
    }
  })
})

function assertPath(
  itemPath: string | string[],
  expectedPath: string,
  expectedSegments: string[]
): void {
  const actual = new Path(itemPath)
  expect(actual.toString()).toBe(expectedPath)
  expect(actual.segments).toEqual(expectedSegments)
}

@ -0,0 +1,152 @@
import * as path from 'path'
import * as patternHelper from '../src/internal-pattern-helper'
import {MatchKind} from '../src/internal-match-kind'
import {IS_WINDOWS} from '../../io/src/io-util'
import {Pattern} from '../src/internal-pattern'

describe('pattern-helper', () => {
  it('getSearchPaths omits negate search paths', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const patterns = [
      `${root}search1/foo/**`,
      `${root}search2/bar/**`,
      `!${root}search3/baz/**`
    ].map(x => new Pattern(x))
    const searchPaths = patternHelper.getSearchPaths(patterns)
    expect(searchPaths).toEqual([
      `${root}search1${path.sep}foo`,
      `${root}search2${path.sep}bar`
    ])
  })

  it('getSearchPaths omits search path when ancestor is also a search path', () => {
    if (IS_WINDOWS) {
      const patterns = [
        'C:\\Search1\\Foo\\**',
        'C:\\sEARCH1\\fOO\\bar\\**',
        'C:\\sEARCH1\\foo\\bar',
        'C:\\Search2\\**',
        'C:\\Search3\\Foo\\Bar\\**',
        'C:\\sEARCH3\\fOO\\bAR\\**'
      ].map(x => new Pattern(x))
      const searchPaths = patternHelper.getSearchPaths(patterns)
      expect(searchPaths).toEqual([
        'C:\\Search1\\Foo',
        'C:\\Search2',
        'C:\\Search3\\Foo\\Bar'
      ])
    } else {
      const patterns = [
        '/search1/foo/**',
        '/search1/foo/bar/**',
        '/search2/foo/bar',
        '/search2/**',
        '/search3/foo/bar/**',
        '/search3/foo/bar/**'
      ].map(x => new Pattern(x))

      const searchPaths = patternHelper.getSearchPaths(patterns)
      expect(searchPaths).toEqual([
        '/search1/foo',
        '/search2',
        '/search3/foo/bar'
      ])
    }
  })

  it('match supports interleaved exclude patterns', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const itemPaths = [
      `${root}solution1/proj1/proj1.proj`,
      `${root}solution1/proj1/README.txt`,
      `${root}solution1/proj2/proj2.proj`,
      `${root}solution1/proj2/README.txt`,
      `${root}solution1/solution1.sln`,
      `${root}solution2/proj1/proj1.proj`,
      `${root}solution2/proj1/README.txt`,
      `${root}solution2/proj2/proj2.proj`,
      `${root}solution2/proj2/README.txt`,
      `${root}solution2/solution2.sln`
    ]
    const patterns = [
      `${root}**/*.proj`, // include all proj files
      `${root}**/README.txt`, // include all README files
      `!${root}**/solution2/**`, // exclude the solution 2 folder entirely
      `${root}**/*.sln`, // include all sln files
      `!${root}**/proj2/README.txt` // exclude proj2 README files
    ].map(x => new Pattern(x))
    const matched = itemPaths.filter(
      x => patternHelper.match(patterns, x) === MatchKind.All
    )
    expect(matched).toEqual([
      `${root}solution1/proj1/proj1.proj`,
      `${root}solution1/proj1/README.txt`,
      `${root}solution1/proj2/proj2.proj`,
      `${root}solution1/solution1.sln`,
      `${root}solution2/solution2.sln`
    ])
  })

  it('match supports excluding directories', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const itemPaths = [
      root,
      `${root}foo`,
      `${root}foo/bar`,
      `${root}foo/bar/baz`
    ]
    const patterns = [
      `${root}foo/**`, // include all files and directories
      `!${root}foo/**/` // exclude directories
    ].map(x => new Pattern(x))
    const matchKinds = itemPaths.map(x => patternHelper.match(patterns, x))
    expect(matchKinds).toEqual([
MatchKind.None,
|
||||
MatchKind.File,
|
||||
MatchKind.File,
|
||||
MatchKind.File
|
||||
])
|
||||
})
|
||||
|
||||
it('match supports including directories only', () => {
|
||||
const root = IS_WINDOWS ? 'C:\\' : '/'
|
||||
const itemPaths = [
|
||||
root,
|
||||
`${root}foo/`,
|
||||
`${root}foo/bar`,
|
||||
`${root}foo/bar/baz`
|
||||
]
|
||||
const patterns = [
|
||||
`${root}foo/**/` // include directories only
|
||||
].map(x => new Pattern(x))
|
||||
const matchKinds = itemPaths.map(x => patternHelper.match(patterns, x))
|
||||
expect(matchKinds).toEqual([
|
||||
MatchKind.None,
|
||||
MatchKind.Directory,
|
||||
MatchKind.Directory,
|
||||
MatchKind.Directory
|
||||
])
|
||||
})
|
||||
|
||||
it('partialMatch skips negate patterns', () => {
|
||||
const root = IS_WINDOWS ? 'C:\\' : '/'
|
||||
const patterns = [
|
||||
`${root}search1/foo/**`,
|
||||
`${root}search2/bar/**`,
|
||||
`!${root}search2/bar/**`,
|
||||
`!${root}search3/baz/**`
|
||||
].map(x => new Pattern(x))
|
||||
expect(patternHelper.partialMatch(patterns, `${root}search1`)).toBeTruthy()
|
||||
expect(
|
||||
patternHelper.partialMatch(patterns, `${root}search1/foo`)
|
||||
).toBeTruthy()
|
||||
expect(patternHelper.partialMatch(patterns, `${root}search2`)).toBeTruthy()
|
||||
expect(
|
||||
patternHelper.partialMatch(patterns, `${root}search2/bar`)
|
||||
).toBeTruthy()
|
||||
expect(patternHelper.partialMatch(patterns, `${root}search3`)).toBeFalsy()
|
||||
expect(
|
||||
patternHelper.partialMatch(patterns, `${root}search3/bar`)
|
||||
).toBeFalsy()
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,352 @@
import * as io from '../../io/src/io'
import * as os from 'os'
import * as path from 'path'
import {MatchKind} from '../src/internal-match-kind'
import {promises as fs} from 'fs'
import {Pattern} from '../src/internal-pattern'

const IS_WINDOWS = process.platform === 'win32'

describe('pattern', () => {
  beforeAll(async () => {
    await io.rmRF(getTestTemp())
  })

  it('counts leading negate markers', () => {
    const actual = [
      '/initial-includes/*.txt',
      '!!/hello/two-negate-markers.txt',
      '!!!!/hello/four-negate-markers.txt',
      '!/initial-includes/one-negate-markers.txt',
      '!!!/initial-includes/three-negate-markers.txt'
    ].map(x => new Pattern(x).negate)
    expect(actual).toEqual([false, false, false, true, true])
  })

  it('escapes homedir', async () => {
    const home = path.join(getTestTemp(), 'home-with-[and]')
    await fs.mkdir(home, {recursive: true})
    const pattern = new Pattern('~/m*', undefined, home)

    expect(pattern.searchPath).toBe(home)
    expect(pattern.match(path.join(home, 'match'))).toBeTruthy()
    expect(pattern.match(path.join(home, 'not-match'))).toBeFalsy()
  })

  it('escapes root', async () => {
    const originalCwd = process.cwd()
    const rootPath = path.join(getTestTemp(), 'cwd-with-[and]')
    await fs.mkdir(rootPath, {recursive: true})
    try {
      process.chdir(rootPath)

      // Relative
      let pattern = new Pattern('m*')
      expect(pattern.searchPath).toBe(rootPath)
      expect(pattern.match(path.join(rootPath, 'match'))).toBeTruthy()
      expect(pattern.match(path.join(rootPath, 'not-match'))).toBeFalsy()

      if (IS_WINDOWS) {
        const currentDrive = process.cwd().substr(0, 2)
        expect(currentDrive.match(/^[A-Z]:$/i)).toBeTruthy()

        // Relative current drive letter, e.g. C:m*
        pattern = new Pattern(`${currentDrive}m*`)
        expect(pattern.searchPath).toBe(rootPath)
        expect(pattern.match(path.join(rootPath, 'match'))).toBeTruthy()
        expect(pattern.match(path.join(rootPath, 'not-match'))).toBeFalsy()

        // Relative current drive, e.g. \path\to\cwd\m*
        pattern = new Pattern(
          `${Pattern.globEscape(process.cwd().substr(2))}\\m*`
        )
        expect(pattern.searchPath).toBe(rootPath)
        expect(pattern.match(path.join(rootPath, 'match'))).toBeTruthy()
        expect(pattern.match(path.join(rootPath, 'not-match'))).toBeFalsy()
      }
    } finally {
      process.chdir(originalCwd)
    }
  })

  it('globstar matches immediately preceding directory', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const pattern = new Pattern(`${root}foo/bar/**`)
    const actual = [
      root,
      `${root}foo`,
      `${root}foo/bar`,
      `${root}foo/bar/baz`
    ].map(x => pattern.match(x))
    expect(actual).toEqual([
      MatchKind.None,
      MatchKind.None,
      MatchKind.All,
      MatchKind.All
    ])
  })

  it('is case insensitive match on Windows', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const pattern = new Pattern(`${root}Foo/**/Baz`)
    expect(pattern.match(`${root}Foo/Baz`)).toBe(MatchKind.All)
    expect(pattern.match(`${root}Foo/bAZ`)).toBe(
      IS_WINDOWS ? MatchKind.All : MatchKind.None
    )
    expect(pattern.match(`${root}fOO/Baz`)).toBe(
      IS_WINDOWS ? MatchKind.All : MatchKind.None
    )
    expect(pattern.match(`${root}fOO/bar/bAZ`)).toBe(
      IS_WINDOWS ? MatchKind.All : MatchKind.None
    )
  })

  it('is case insensitive partial match on Windows', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const pattern = new Pattern(`${root}Foo/Bar/**/Baz`)
    expect(pattern.partialMatch(`${root}Foo`)).toBeTruthy()
    expect(pattern.partialMatch(`${root}fOO`)).toBe(IS_WINDOWS ? true : false)
  })

  it('matches root', () => {
    const pattern = new Pattern(IS_WINDOWS ? 'C:\\**' : '/**')
    expect(pattern.match(IS_WINDOWS ? 'C:\\' : '/')).toBe(MatchKind.All)
  })

  it('partial matches root', () => {
    if (IS_WINDOWS) {
      let pattern = new Pattern('C:\\foo\\**')
      expect(pattern.partialMatch('c:\\')).toBeTruthy()
      pattern = new Pattern('c:\\foo\\**')
      expect(pattern.partialMatch('C:\\')).toBeTruthy()
    } else {
      const pattern = new Pattern('/foo/**')
      expect(pattern.partialMatch('/')).toBeTruthy()
    }
  })

  it('replaces leading . segment', () => {
    // Pattern is '.'
    let pattern = new Pattern('.')
    expect(pattern.match(process.cwd())).toBe(MatchKind.All)
    expect(pattern.match(path.join(process.cwd(), 'foo'))).toBe(MatchKind.None)

    // Pattern is './foo'
    pattern = new Pattern('./foo')
    expect(pattern.match(path.join(process.cwd(), 'foo'))).toBe(MatchKind.All)
    expect(pattern.match(path.join(process.cwd(), 'bar'))).toBe(MatchKind.None)

    // Pattern is '.foo'
    pattern = new Pattern('.foo')
    expect(pattern.match(path.join(process.cwd(), '.foo'))).toBe(MatchKind.All)
    expect(pattern.match(path.join(process.cwd(), 'foo'))).toBe(MatchKind.None)
    expect(pattern.match(`${process.cwd()}foo`)).toBe(MatchKind.None)
  })

  it('replaces leading ~ segment', async () => {
    const homedir = os.homedir()
    expect(homedir).toBeTruthy()
    await fs.stat(homedir)

    // Pattern is '~'
    let pattern = new Pattern('~')
    expect(pattern.match(homedir)).toBe(MatchKind.All)
    expect(pattern.match(path.join(homedir, 'foo'))).toBe(MatchKind.None)

    // Pattern is '~/foo'
    pattern = new Pattern('~/foo')
    expect(pattern.match(path.join(homedir, 'foo'))).toBe(MatchKind.All)
    expect(pattern.match(path.join(homedir, 'bar'))).toBe(MatchKind.None)

    // Pattern is '~foo'
    pattern = new Pattern('~foo')
    expect(pattern.match(path.join(process.cwd(), '~foo'))).toBe(MatchKind.All)
    expect(pattern.match(path.join(homedir, 'foo'))).toBe(MatchKind.None)
    expect(pattern.match(`${homedir}foo`)).toBe(MatchKind.None)
  })

  it('replaces leading relative root', () => {
    if (IS_WINDOWS) {
      const currentDrive = process.cwd().substr(0, 2)
      expect(currentDrive.match(/^[A-Z]:$/i)).toBeTruthy()
      const otherDrive = currentDrive.toUpperCase().startsWith('C')
        ? 'D:'
        : 'C:'
      expect(process.cwd().length).toBeGreaterThan(3) // sanity check not drive root

      // Pattern is 'C:'
      let pattern = new Pattern(currentDrive)
      expect(pattern.match(process.cwd())).toBeTruthy()
      expect(pattern.match(path.join(process.cwd(), 'foo'))).toBeFalsy()

      // Pattern is 'C:foo'
      pattern = new Pattern(`${currentDrive}foo`)
      expect(pattern.match(path.join(process.cwd(), 'foo'))).toBeTruthy()
      expect(pattern.match(path.join(process.cwd(), 'bar'))).toBeFalsy()
      expect(pattern.match(`${currentDrive}\\foo`)).toBeFalsy()

      // Pattern is 'X:'
      pattern = new Pattern(otherDrive)
      expect(pattern.match(`${otherDrive}\\`)).toBeTruthy()
      expect(pattern.match(`${otherDrive}\\foo`)).toBeFalsy()

      // Pattern is 'X:foo'
      pattern = new Pattern(`${otherDrive}foo`)
      expect(pattern.match(`${otherDrive}\\foo`)).toBeTruthy()
      expect(pattern.match(`${otherDrive}\\bar`)).toBeFalsy()

      // Pattern is '\\path\\to\\cwd'
      pattern = new Pattern(`${process.cwd().substr(2)}\\foo`)
      expect(pattern.match(path.join(process.cwd(), 'foo'))).toBeTruthy()
      expect(pattern.match(path.join(process.cwd(), 'bar'))).toBeFalsy()
    }
  })

  it('roots exclude pattern', () => {
    const patternStrings = ['!hello.txt', '!**/world.txt']
    const actual = patternStrings.map(x => new Pattern(x))
    const expected = patternStrings
      .map(x => x.substr(1))
      .map(x => path.join(Pattern.globEscape(process.cwd()), x))
      .map(x => `!${x}`)
      .map(x => new Pattern(x))
    expect(actual.map(x => x.negate)).toEqual([true, true])
    expect(actual.map(x => x.segments)).toEqual(expected.map(x => x.segments))
  })

  it('roots include pattern', () => {
    const patternStrings = ['hello.txt', '**/world.txt']
    const actual = patternStrings.map(x => new Pattern(x))
    const expected = patternStrings.map(
      x => new Pattern(path.join(Pattern.globEscape(process.cwd()), x))
    )
    expect(actual.map(x => x.segments)).toEqual(expected.map(x => x.segments))
  })

  it('sets trailing separator', () => {
    expect(new Pattern(' foo ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' /foo ').trailingSeparator).toBeFalsy()
    expect(new Pattern('! /foo ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' /foo/* ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' /foo/** ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' \\foo ').trailingSeparator).toBeFalsy()
    expect(new Pattern('! \\foo ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' \\foo\\* ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' \\foo\\** ').trailingSeparator).toBeFalsy()
    expect(new Pattern(' foo/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' /foo/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' C:/foo/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' C:foo/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' D:foo/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern('! /foo/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' /foo/*/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' /foo/**/ ').trailingSeparator).toBeTruthy()
    expect(new Pattern(' foo\\ ').trailingSeparator).toEqual(
      IS_WINDOWS ? true : false
    )
    expect(new Pattern(' \\foo\\ ').trailingSeparator).toEqual(
      IS_WINDOWS ? true : false
    )
    expect(new Pattern('! \\foo\\ ').trailingSeparator).toEqual(
      IS_WINDOWS ? true : false
    )
    expect(new Pattern(' \\foo\\*\\ ').trailingSeparator).toEqual(
      IS_WINDOWS ? true : false
    )
    expect(new Pattern(' \\foo\\**\\ ').trailingSeparator).toEqual(
      IS_WINDOWS ? true : false
    )
  })

  it('supports including directories only', () => {
    const root = IS_WINDOWS ? 'C:\\' : '/'
    const pattern = new Pattern(`${root}foo/**/`) // trailing slash
    const actual = [
      root,
      `${root}foo/`,
      `${root}foo/bar`,
      `${root}foo/bar/baz`
    ].map(x => pattern.match(x))
    expect(pattern.trailingSeparator).toBeTruthy()
    expect(actual).toEqual([
      MatchKind.None,
      MatchKind.Directory,
      MatchKind.Directory,
      MatchKind.Directory
    ])
  })

  it('trims pattern', () => {
    const pattern = new Pattern(' hello.txt ')
    expect(pattern.segments.reverse()[0]).toBe('hello.txt')
  })

  it('trims whitespace after trimming negate markers', () => {
    const pattern = new Pattern(' ! ! ! hello.txt ')
    expect(pattern.negate).toBeTruthy()
    expect(pattern.segments.reverse()[0]).toBe('hello.txt')
  })

  it('unescapes segments to narrow search path', () => {
    // Positive
    const root = IS_WINDOWS ? 'C:\\' : '/'
    let pattern = new Pattern(`${root}foo/b[a]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}bar`)
    expect(pattern.match(`${root}foo/bar/baz`)).toBeTruthy()
    pattern = new Pattern(`${root}foo/b[*]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b*r`)
    expect(pattern.match(`${root}foo/b*r/baz`)).toBeTruthy()
    expect(pattern.match(`${root}foo/bar/baz`)).toBeFalsy()
    pattern = new Pattern(`${root}foo/b[?]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b?r`)
    expect(pattern.match(`${root}foo/b?r/baz`)).toBeTruthy()
    expect(pattern.match(`${root}foo/bar/baz`)).toBeFalsy()
    pattern = new Pattern(`${root}foo/b[!]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b!r`)
    expect(pattern.match(`${root}foo/b!r/baz`)).toBeTruthy()
    pattern = new Pattern(`${root}foo/b[[]ar/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b[ar`)
    expect(pattern.match(`${root}foo/b[ar/baz`)).toBeTruthy()
    pattern = new Pattern(`${root}foo/b[]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b[]r`)
    expect(pattern.match(`${root}foo/b[]r/baz`)).toBeTruthy()
    pattern = new Pattern(`${root}foo/b[r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b[r`)
    expect(pattern.match(`${root}foo/b[r/baz`)).toBeTruthy()
    pattern = new Pattern(`${root}foo/b]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo${path.sep}b]r`)
    expect(pattern.match(`${root}foo/b]r/baz`)).toBeTruthy()
    if (!IS_WINDOWS) {
      pattern = new Pattern('/foo/b\\[a]r/b*')
      expect(pattern.searchPath).toBe(`${path.sep}foo${path.sep}b[a]r`)
      expect(pattern.match('/foo/b[a]r/baz')).toBeTruthy()
      pattern = new Pattern('/foo/b[\\!]r/b*')
      expect(pattern.searchPath).toBe(`${path.sep}foo${path.sep}b!r`)
      expect(pattern.match('/foo/b!r/baz')).toBeTruthy()
      pattern = new Pattern('/foo/b[\\]]r/b*')
      expect(pattern.searchPath).toBe(`${path.sep}foo${path.sep}b]r`)
      expect(pattern.match('/foo/b]r/baz')).toBeTruthy()
      pattern = new Pattern('/foo/b[\\a]r/b*')
      expect(pattern.searchPath).toBe(`${path.sep}foo${path.sep}bar`)
      expect(pattern.match('/foo/bar/baz')).toBeTruthy()
    }

    // Negative
    pattern = new Pattern(`${root}foo/b[aA]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo`)
    pattern = new Pattern(`${root}foo/b[!a]r/b*`)
    expect(pattern.searchPath).toBe(`${root}foo`)
    if (IS_WINDOWS) {
      pattern = new Pattern('C:/foo/b\\[a]r/b*')
      expect(pattern.searchPath).toBe(`C:\\foo\\b\\ar`)
      expect(pattern.match('C:/foo/b/ar/baz')).toBeTruthy()
      pattern = new Pattern('C:/foo/b[\\!]r/b*')
      expect(pattern.searchPath).toBe('C:\\foo\\b[\\!]r')
      expect(pattern.match('C:/foo/b[undefined/!]r/baz')).toBeTruthy() // Note, "undefined" substr to accommodate a bug in Minimatch when nocase=true
    }
  })
})

function getTestTemp(): string {
  return path.join(__dirname, '_temp', 'internal-pattern')
}
@@ -0,0 +1,35 @@
{
  "name": "@actions/glob",
  "version": "0.1.0",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "balanced-match": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
      "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
    },
    "brace-expansion": {
      "version": "1.1.11",
      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
      "requires": {
        "balanced-match": "^1.0.0",
        "concat-map": "0.0.1"
      }
    },
    "concat-map": {
      "version": "0.0.1",
      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
      "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
    },
    "minimatch": {
      "version": "3.0.4",
      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
      "requires": {
        "brace-expansion": "^1.1.7"
      }
    }
  }
}
@@ -0,0 +1,42 @@
{
  "name": "@actions/glob",
  "version": "0.1.0",
  "preview": true,
  "description": "Actions glob lib",
  "keywords": [
    "github",
    "actions",
    "glob"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/master/packages/glob",
  "license": "MIT",
  "main": "lib/glob.js",
  "types": "lib/glob.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib"
  ],
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/glob"
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --audit-level=moderate",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.2.0",
    "minimatch": "^3.0.4"
  }
}
@@ -0,0 +1,17 @@
import {Globber, DefaultGlobber} from './internal-globber'
import {GlobOptions} from './internal-glob-options'

export {Globber, GlobOptions}

/**
 * Constructs a globber
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
export async function create(
  patterns: string,
  options?: GlobOptions
): Promise<Globber> {
  return await DefaultGlobber.create(patterns, options)
}
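This exported `create` function, together with the options below, is the package's whole public surface. A minimal usage sketch (the pattern strings and option values here are illustrative, not part of the source; the API calls themselves are taken from this diff):

```ts
import * as glob from '@actions/glob'

async function run(): Promise<void> {
  // Newline-separated patterns; `!` negates, and lines starting
  // with `#` are treated as comments by DefaultGlobber.create
  const globber = await glob.create('**/*.ts\n!**/node_modules/**', {
    followSymbolicLinks: false
  })
  for await (const file of globber.globGenerator()) {
    console.log(file)
  }
}
```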
@@ -0,0 +1,32 @@
import * as core from '@actions/core'
import {GlobOptions} from './internal-glob-options'

/**
 * Returns a copy with defaults filled in.
 */
export function getOptions(copy?: GlobOptions): GlobOptions {
  const result: GlobOptions = {
    followSymbolicLinks: true,
    implicitDescendants: true,
    omitBrokenSymbolicLinks: true
  }

  if (copy) {
    if (typeof copy.followSymbolicLinks === 'boolean') {
      result.followSymbolicLinks = copy.followSymbolicLinks
      core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`)
    }

    if (typeof copy.implicitDescendants === 'boolean') {
      result.implicitDescendants = copy.implicitDescendants
      core.debug(`implicitDescendants '${result.implicitDescendants}'`)
    }

    if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
      result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks
      core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`)
    }
  }

  return result
}
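Note that the `typeof ... === 'boolean'` guards mean only explicitly supplied booleans override the defaults; anything left undefined keeps its default. An illustrative sketch of that behavior (calling this internal helper directly, for demonstration only):

```ts
import {getOptions} from './internal-glob-options-helper'

// Every option defaults to true
getOptions()
// => {followSymbolicLinks: true, implicitDescendants: true, omitBrokenSymbolicLinks: true}

// A partial copy overrides only what it explicitly sets
getOptions({followSymbolicLinks: false})
// => {followSymbolicLinks: false, implicitDescendants: true, omitBrokenSymbolicLinks: true}
```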
@@ -0,0 +1,31 @@
/**
 * Options to control globbing behavior
 */
export interface GlobOptions {
  /**
   * Indicates whether to follow symbolic links. Generally this should be set
   * to false when deleting files.
   *
   * @default true
   */
  followSymbolicLinks?: boolean

  /**
   * Indicates whether directories that match a glob pattern should implicitly
   * cause all descendant paths to be matched.
   *
   * For example, given the directory `my-dir`, the following glob patterns
   * would produce the same results: `my-dir/**`, `my-dir/`, `my-dir`
   *
   * @default true
   */
  implicitDescendants?: boolean

  /**
   * Indicates whether broken symbolic links should be ignored and omitted
   * from the result set. Otherwise an error will be thrown.
   *
   * @default true
   */
  omitBrokenSymbolicLinks?: boolean
}
@@ -0,0 +1,242 @@
import * as core from '@actions/core'
import * as fs from 'fs'
import * as globOptionsHelper from './internal-glob-options-helper'
import * as path from 'path'
import * as patternHelper from './internal-pattern-helper'
import {GlobOptions} from './internal-glob-options'
import {MatchKind} from './internal-match-kind'
import {Pattern} from './internal-pattern'
import {SearchState} from './internal-search-state'

const IS_WINDOWS = process.platform === 'win32'

export {GlobOptions}

/**
 * Used to match files and directories
 */
export interface Globber {
  /**
   * Returns the search path preceding the first glob segment, from each pattern.
   * Duplicates and descendants of other paths are filtered out.
   *
   * Example 1: The patterns `/foo/*` and `/bar/*` return `/foo` and `/bar`.
   *
   * Example 2: The patterns `/foo/*` and `/foo/bar/*` return `/foo`.
   */
  getSearchPaths(): string[]

  /**
   * Returns files and directories matching the glob patterns.
   *
   * Order of the results is not guaranteed.
   */
  glob(): Promise<string[]>

  /**
   * Returns files and directories matching the glob patterns.
   *
   * Order of the results is not guaranteed.
   */
  globGenerator(): AsyncGenerator<string, void>
}

export class DefaultGlobber implements Globber {
  private readonly options: GlobOptions
  private readonly patterns: Pattern[] = []
  private readonly searchPaths: string[] = []

  private constructor(options?: GlobOptions) {
    this.options = globOptionsHelper.getOptions(options)
  }

  getSearchPaths(): string[] {
    // Return a copy
    return this.searchPaths.slice()
  }

  async glob(): Promise<string[]> {
    const result: string[] = []
    for await (const itemPath of this.globGenerator()) {
      result.push(itemPath)
    }
    return result
  }

  async *globGenerator(): AsyncGenerator<string, void> {
    // Fill in default options
    const options = globOptionsHelper.getOptions(this.options)

    // Implicit descendants?
    const patterns: Pattern[] = []
    for (const pattern of this.patterns) {
      patterns.push(pattern)
      if (
        options.implicitDescendants &&
        (pattern.trailingSeparator ||
          pattern.segments[pattern.segments.length - 1] !== '**')
      ) {
        patterns.push(
          new Pattern(pattern.negate, pattern.segments.concat('**'))
        )
      }
    }

    // Push the search paths
    const stack: SearchState[] = []
    for (const searchPath of patternHelper.getSearchPaths(patterns)) {
      core.debug(`Search path '${searchPath}'`)

      // Exists?
      try {
        // Intentionally using lstat. Detection for broken symlink
        // will be performed later (if following symlinks).
        await fs.promises.lstat(searchPath)
      } catch (err) {
        if (err.code === 'ENOENT') {
          continue
        }
        throw err
      }

      stack.unshift(new SearchState(searchPath, 1))
    }

    // Search
    const traversalChain: string[] = [] // used to detect cycles
    while (stack.length) {
      // Pop
      const item = stack.pop() as SearchState

      // Match?
      const match = patternHelper.match(patterns, item.path)
      const partialMatch =
        !!match || patternHelper.partialMatch(patterns, item.path)
      if (!match && !partialMatch) {
        continue
      }

      // Stat
      const stats: fs.Stats | undefined = await DefaultGlobber.stat(
        item,
        options,
        traversalChain
      )

      // Broken symlink, or symlink cycle detected, or no longer exists
      if (!stats) {
        continue
      }

      // Directory
      if (stats.isDirectory()) {
        // Matched
        if (match & MatchKind.Directory) {
          yield item.path
        }
        // Descend?
        else if (!partialMatch) {
          continue
        }

        // Push the child items in reverse
        const childLevel = item.level + 1
        const childItems = (await fs.promises.readdir(item.path)).map(
          x => new SearchState(path.join(item.path, x), childLevel)
        )
        stack.push(...childItems.reverse())
      }
      // File
      else if (match & MatchKind.File) {
        yield item.path
      }
    }
  }

  /**
   * Constructs a DefaultGlobber
   */
  static async create(
    patterns: string,
    options?: GlobOptions
  ): Promise<DefaultGlobber> {
    const result = new DefaultGlobber(options)

    if (IS_WINDOWS) {
      patterns = patterns.replace(/\r\n/g, '\n')
      patterns = patterns.replace(/\r/g, '\n')
    }

    const lines = patterns.split('\n').map(x => x.trim())
    for (const line of lines) {
      // Empty or comment
      if (!line || line.startsWith('#')) {
        continue
      }
      // Pattern
      else {
        result.patterns.push(new Pattern(line))
      }
    }

    result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns))
    return result
  }

  private static async stat(
    item: SearchState,
    options: GlobOptions,
    traversalChain: string[]
  ): Promise<fs.Stats | undefined> {
    // Note:
    // `stat` returns info about the target of a symlink (or symlink chain)
    // `lstat` returns info about a symlink itself
    let stats: fs.Stats
    if (options.followSymbolicLinks) {
      try {
        // Use `stat` (following symlinks)
        stats = await fs.promises.stat(item.path)
      } catch (err) {
        if (err.code === 'ENOENT') {
          if (options.omitBrokenSymbolicLinks) {
            core.debug(`Broken symlink '${item.path}'`)
            return undefined
          }

          throw new Error(
            `No information found for the path '${item.path}'. This may indicate a broken symbolic link.`
          )
        }

        throw err
      }
    } else {
      // Use `lstat` (not following symlinks)
      stats = await fs.promises.lstat(item.path)
    }

    // Note, isDirectory() returns false for the lstat of a symlink
    if (stats.isDirectory() && options.followSymbolicLinks) {
      // Get the realpath
      const realPath: string = await fs.promises.realpath(item.path)

      // Fixup the traversal chain to match the item level
      while (traversalChain.length >= item.level) {
        traversalChain.pop()
      }

      // Test for a cycle
      if (traversalChain.some((x: string) => x === realPath)) {
        core.debug(
          `Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`
        )
        return undefined
      }

      // Update the traversal chain
      traversalChain.push(realPath)
    }

    return stats
  }
}
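One subtlety worth calling out in `globGenerator`: with `implicitDescendants` enabled, every pattern that does not already end in a globstar gets a companion `**` pattern, which is why the options doc above says `my-dir` behaves like `my-dir/**`. A rough sketch of that expansion, using the same `Pattern(negate, segments)` overload the globber uses (the `/foo` value is illustrative):

```ts
import {Pattern} from './internal-pattern'

// '/foo' alone would match only the directory itself; the companion
// '/foo/**' generated here is what pulls in everything beneath it
const original = new Pattern('/foo')
const companion = new Pattern(original.negate, original.segments.concat('**'))
```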
@@ -0,0 +1,16 @@
/**
 * Indicates whether a pattern matches a path
 */
export enum MatchKind {
  /** Not matched */
  None = 0,

  /** Matched if the path is a directory */
  Directory = 1,

  /** Matched if the path is a regular file */
  File = 2,

  /** Matched */
  All = Directory | File
}
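Because the kinds are bit flags, results from multiple patterns can be combined with bitwise operators, which is exactly what the pattern helper does: an include pattern ORs its kind in, a negate pattern masks its kind out. Illustrative arithmetic:

```ts
let result: MatchKind = MatchKind.None // 0b00
result |= MatchKind.All                // an include pattern matched -> 0b11
result &= ~MatchKind.Directory         // a negate directory pattern -> 0b10
// result is now MatchKind.File: only regular files still match
```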
@@ -0,0 +1,206 @@
import * as path from 'path'
import assert from 'assert'

const IS_WINDOWS = process.platform === 'win32'

/**
 * Similar to path.dirname, except it normalizes the path separators and has
 * slightly better handling for Windows UNC paths.
 *
 * For example, on Linux/macOS:
 * - `/ => /`
 * - `/hello => /`
 *
 * For example, on Windows:
 * - `C:\ => C:\`
 * - `C:\hello => C:\`
 * - `C: => C:`
 * - `C:hello => C:`
 * - `\ => \`
 * - `\hello => \`
 * - `\\hello => \\hello`
 * - `\\hello\world => \\hello\world`
 */
export function dirname(p: string): string {
  // Normalize slashes and trim unnecessary trailing slash
  p = safeTrimTrailingSeparator(p)

  // Windows UNC root, e.g. \\hello or \\hello\world
  if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
    return p
  }

  // Get dirname
  let result = path.dirname(p)

  // Trim trailing slash for Windows UNC root, e.g. \\hello\world\
  if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
    result = safeTrimTrailingSeparator(result)
  }

  return result
}

/**
 * Roots the path if not already rooted. On Windows, relative roots like `\`
 * or `C:` are expanded based on the current working directory.
 */
export function ensureAbsoluteRoot(root: string, itemPath: string): string {
  assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`)
  assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`)

  // Already rooted
  if (hasAbsoluteRoot(itemPath)) {
    return itemPath
  }

  // Windows
  if (IS_WINDOWS) {
    // Check for itemPath like C: or C:foo
    if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
      let cwd = process.cwd()
      assert(
        cwd.match(/^[A-Z]:\\/i),
        `Expected current directory to start with an absolute drive root. Actual '${cwd}'`
      )

      // Drive letter matches cwd? Expand to cwd
      if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
        // Drive only, e.g. C:
        if (itemPath.length === 2) {
          // Preserve specified drive letter case (upper or lower)
          return `${itemPath[0]}:\\${cwd.substr(3)}`
        }
        // Drive + path, e.g. C:foo
        else {
          if (!cwd.endsWith('\\')) {
            cwd += '\\'
          }
          // Preserve specified drive letter case (upper or lower)
          return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`
        }
      }
      // Different drive
      else {
        return `${itemPath[0]}:\\${itemPath.substr(2)}`
      }
    }
    // Check for itemPath like \ or \foo
    else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
      const cwd = process.cwd()
      assert(
        cwd.match(/^[A-Z]:\\/i),
        `Expected current directory to start with an absolute drive root. Actual '${cwd}'`
      )

      return `${cwd[0]}:\\${itemPath.substr(1)}`
    }
  }

  assert(
    hasAbsoluteRoot(root),
    `ensureAbsoluteRoot parameter 'root' must have an absolute root`
  )

  // Otherwise ensure root ends with a separator
  if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
    // Intentionally empty
  } else {
    // Append separator
    root += path.sep
  }

  return root + itemPath
}

/**
 * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
 * `\\hello\share` and `C:\hello` (and using alternate separator).
 */
export function hasAbsoluteRoot(itemPath: string): boolean {
  assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`)

  // Normalize separators
  itemPath = normalizeSeparators(itemPath)

  // Windows
  if (IS_WINDOWS) {
    // E.g. \\hello\share or C:\hello
    return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath)
  }

  // E.g. /hello
  return itemPath.startsWith('/')
}

/**
 * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
 * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
 */
export function hasRoot(itemPath: string): boolean {
  assert(itemPath, `hasRoot parameter 'itemPath' must not be empty`)

  // Normalize separators
  itemPath = normalizeSeparators(itemPath)

  // Windows
  if (IS_WINDOWS) {
    // E.g. \ or \hello or \\hello
    // E.g. C: or C:\hello
    return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath)
  }

  // E.g. /hello
  return itemPath.startsWith('/')
}

/**
 * Removes redundant slashes and converts `/` to `\` on Windows
 */
export function normalizeSeparators(p: string): string {
  p = p || ''

  // Windows
  if (IS_WINDOWS) {
    // Convert slashes on Windows
    p = p.replace(/\//g, '\\')

    // Remove redundant slashes
    const isUnc = /^\\\\+[^\\]/.test(p) // e.g. \\hello
    return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\') // preserve leading \\ for UNC
  }

  // Remove redundant slashes
  return p.replace(/\/\/+/g, '/')
}

/**
 * Normalizes the path separators and trims the trailing separator (when safe).
 * For example, `/foo/ => /foo` but `/ => /`
 */
export function safeTrimTrailingSeparator(p: string): string {
  // Short-circuit if empty
  if (!p) {
    return ''
  }

  // Normalize separators
  p = normalizeSeparators(p)

  // No trailing slash
  if (!p.endsWith(path.sep)) {
    return p
  }

  // Check '/' on Linux/macOS and '\' on Windows
  if (p === path.sep) {
    return p
  }

  // On Windows check if drive root. E.g. C:\
  if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
    return p
  }

  // Otherwise trim trailing slash
  return p.substr(0, p.length - 1)
}
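The relative-root expansion is easiest to see with concrete values. A hedged walkthrough, assuming a Windows process whose cwd is `C:\work` (the inputs are illustrative; the outcomes follow the branches above):

```ts
import {ensureAbsoluteRoot} from './internal-path-helper'

// Already absolute: returned unchanged
ensureAbsoluteRoot('D:\\', 'C:\\hello') // => 'C:\hello'

// Drive-relative on the cwd's drive: expands against the cwd
ensureAbsoluteRoot('D:\\', 'C:hello') // => 'C:\work\hello'

// Drive-relative on another drive: assumes that drive's root
ensureAbsoluteRoot('C:\\', 'D:hello') // => 'D:\hello'

// Rooted but drive-less: borrows the cwd's drive letter
ensureAbsoluteRoot('C:\\', '\\hello') // => 'C:\hello'

// Plain relative: appended to the supplied root
ensureAbsoluteRoot('C:\\work', 'hello') // => 'C:\work\hello'
```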
@@ -0,0 +1,113 @@
import * as path from 'path'
import * as pathHelper from './internal-path-helper'
import assert from 'assert'

const IS_WINDOWS = process.platform === 'win32'

/**
 * Helper class for parsing paths into segments
 */
export class Path {
  segments: string[] = []

  /**
   * Constructs a Path
   * @param itemPath Path or array of segments
   */
  constructor(itemPath: string | string[]) {
    // String
    if (typeof itemPath === 'string') {
      assert(itemPath, `Parameter 'itemPath' must not be empty`)

      // Normalize slashes and trim unnecessary trailing slash
      itemPath = pathHelper.safeTrimTrailingSeparator(itemPath)

      // Not rooted
      if (!pathHelper.hasRoot(itemPath)) {
        this.segments = itemPath.split(path.sep)
      }
      // Rooted
      else {
        // Add all segments, while not at the root
        let remaining = itemPath
        let dir = pathHelper.dirname(remaining)
        while (dir !== remaining) {
          // Add the segment
          const basename = path.basename(remaining)
          this.segments.unshift(basename)

          // Truncate the last segment
          remaining = dir
          dir = pathHelper.dirname(remaining)
        }

        // Remainder is the root
        this.segments.unshift(remaining)
      }
    }
    // Array
    else {
      // Must not be empty
      assert(
        itemPath.length > 0,
        `Parameter 'itemPath' must not be an empty array`
      )

      // Each segment
      for (let i = 0; i < itemPath.length; i++) {
        let segment = itemPath[i]

        // Must not be empty
        assert(
          segment,
          `Parameter 'itemPath' must not contain any empty segments`
        )

        // Normalize slashes
        segment = pathHelper.normalizeSeparators(itemPath[i])

        // Root segment
        if (i === 0 && pathHelper.hasRoot(segment)) {
          segment = pathHelper.safeTrimTrailingSeparator(segment)
          assert(
            segment === pathHelper.dirname(segment),
            `Parameter 'itemPath' root segment contains information for multiple segments`
          )
          this.segments.push(segment)
        }
        // All other segments
        else {
          // Must not contain slash
          assert(
            !segment.includes(path.sep),
            `Parameter 'itemPath' contains unexpected path separators`
          )
          this.segments.push(segment)
        }
      }
    }
  }

  /**
   * Converts the path to its string representation
   */
  toString(): string {
    // First segment
    let result = this.segments[0]

    // All others
    let skipSlash =
      result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result))
    for (let i = 1; i < this.segments.length; i++) {
      if (skipSlash) {
        skipSlash = false
      } else {
        result += path.sep
      }

      result += this.segments[i]
    }

    return result
  }
}
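The segment model round-trips: parse a string into segments, and `toString` reassembles it with exactly one separator between segments (none after a root that already ends in one). Illustrative values, consistent with the internal-path tests earlier in this diff:

```ts
import {Path} from './internal-path'

// On Linux/macOS:
new Path('/foo//bar/').segments // => ['/', 'foo', 'bar']
new Path('/foo//bar/').toString() // => '/foo/bar'
new Path(['foo', 'bar']).toString() // => 'foo/bar'
```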
@@ -0,0 +1,81 @@
import * as pathHelper from './internal-path-helper'
import {MatchKind} from './internal-match-kind'
import {Pattern} from './internal-pattern'

const IS_WINDOWS = process.platform === 'win32'

/**
 * Given an array of patterns, returns an array of paths to search.
 * Duplicates and paths under other included paths are filtered out.
 */
export function getSearchPaths(patterns: Pattern[]): string[] {
  // Ignore negate patterns
  patterns = patterns.filter(x => !x.negate)

  // Create a map of all search paths
  const searchPathMap: {[key: string]: string} = {}
  for (const pattern of patterns) {
    const key = IS_WINDOWS
      ? pattern.searchPath.toUpperCase()
      : pattern.searchPath
    searchPathMap[key] = 'candidate'
  }

  const result: string[] = []

  for (const pattern of patterns) {
    // Check if already included
    const key = IS_WINDOWS
      ? pattern.searchPath.toUpperCase()
      : pattern.searchPath
    if (searchPathMap[key] === 'included') {
      continue
    }

    // Check for an ancestor search path
    let foundAncestor = false
    let tempKey = key
    let parent = pathHelper.dirname(tempKey)
    while (parent !== tempKey) {
      if (searchPathMap[parent]) {
        foundAncestor = true
        break
      }

      tempKey = parent
      parent = pathHelper.dirname(tempKey)
    }

    // Include the search pattern in the result
    if (!foundAncestor) {
      result.push(pattern.searchPath)
      searchPathMap[key] = 'included'
    }
  }

  return result
}

/**
 * Matches the patterns against the path
 */
export function match(patterns: Pattern[], itemPath: string): MatchKind {
  let result: MatchKind = MatchKind.None

  for (const pattern of patterns) {
    if (pattern.negate) {
      result &= ~pattern.match(itemPath)
    } else {
      result |= pattern.match(itemPath)
    }
  }

  return result
}

/**
 * Checks whether to descend further into the directory
 */
export function partialMatch(patterns: Pattern[], itemPath: string): boolean {
  return patterns.some(x => !x.negate && x.partialMatch(itemPath))
}
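Since `match` folds the patterns left to right, order matters: a later include can re-add a path that an earlier negate removed, which is what makes the interleaved test above pass. A small self-contained sketch (paths assume a Linux/macOS root; the pattern strings are illustrative and the imports assume the snippet sits alongside these internal modules):

```ts
import * as patternHelper from './internal-pattern-helper'
import {Pattern} from './internal-pattern'

// Later includes win over earlier excludes
const patterns = [
  '/**/*.proj', // include project files
  '!/**/solution2/**', // exclude solution2 entirely
  '/**/*.sln' // re-include sln files, even inside solution2
].map(x => new Pattern(x))

patternHelper.match(patterns, '/solution2/solution2.sln') // => MatchKind.All
patternHelper.match(patterns, '/solution2/proj1/proj1.proj') // => MatchKind.None
```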